Actual source code: gr2.c
petsc-3.7.3 2016-08-01
2: /*
3: Plots vectors obtained with DMDACreate2d()
4: */
6: #include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/
7: #include <petsc/private/vecimpl.h>
8: #include <petscdraw.h>
9: #include <petscviewerhdf5.h>
11: /*
12: The data that is passed into the graphics callback
13: */
14: typedef struct {
15: PetscMPIInt rank;
16: PetscInt m,n,dof,k;
17: PetscReal xmin,xmax,ymin,ymax,min,max;
18: const PetscScalar *xy,*v;
19: PetscBool showaxis,showgrid;
20: const char *name0,*name1;
21: } ZoomCtx;
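/*
   Editor's note: a minimal sketch (not part of PETSc) of how a ZoomCtx is filled in and
   handed to PetscDrawZoom(), which then invokes the callback below on each redraw/zoom;
   the variable names here are illustrative only.

     ZoomCtx zctx;
     zctx.m = m; zctx.n = n; zctx.dof = dof; zctx.k = 0;
     zctx.min = vmin; zctx.max = vmax;
     zctx.xy = coords; zctx.v = values;
     zctx.showaxis = PETSC_TRUE; zctx.showgrid = PETSC_FALSE;
     PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
*/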
23: /*
24: This does the drawing for one particular field
25: in one particular set of coordinates. It is a callback
26: called from PetscDrawZoom()
27: */
30: PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
31: {
32: ZoomCtx *zctx = (ZoomCtx*)ctx;
33: PetscErrorCode ierr;
34: PetscInt m,n,i,j,k,dof,id,c1,c2,c3,c4;
35: PetscReal min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
36: const PetscScalar *xy,*v;
39: m = zctx->m;
40: n = zctx->n;
41: dof = zctx->dof;
42: k = zctx->k;
43: xy = zctx->xy;
44: v = zctx->v;
45: min = zctx->min;
46: max = zctx->max;
48: /* Draw the contour plot patch */
49: PetscDrawCollectiveBegin(draw);
50: for (j=0; j<n-1; j++) {
51: for (i=0; i<m-1; i++) {
52: id = i+j*m;
53: x1 = PetscRealPart(xy[2*id]);
54: y_1 = PetscRealPart(xy[2*id+1]);
55: c1 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);
57: id = i+j*m+1;
58: x2 = PetscRealPart(xy[2*id]);
59: y2 = PetscRealPart(xy[2*id+1]);
60: c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);
62: id = i+j*m+1+m;
63: x3 = PetscRealPart(xy[2*id]);
64: y3 = PetscRealPart(xy[2*id+1]);
65: c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);
67: id = i+j*m+m;
68: x4 = PetscRealPart(xy[2*id]);
69: y4 = PetscRealPart(xy[2*id+1]);
70: c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);
72: PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);
73: PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);
74: if (zctx->showgrid) {
75: PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
76: PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
77: PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
78: PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
79: }
80: }
81: }
82: if (zctx->showaxis && !zctx->rank) {
83: if (zctx->name0 || zctx->name1) {
84: PetscReal xl,yl,xr,yr,x,y;
85: PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);
86: x = xl + .30*(xr - xl);
87: xl = xl + .01*(xr - xl);
88: y = yr - .30*(yr - yl);
89: yl = yl + .01*(yr - yl);
90: if (zctx->name0) {PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);}
91: if (zctx->name1) {PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);}
92: }
93: /*
94: Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
95: but that may require some refactoring.
96: */
97: {
98: double xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
99: double xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
100: char value[16]; size_t len; PetscReal w;
101: PetscSNPrintf(value,16,"%0.2e",xmin);
102: PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
103: PetscSNPrintf(value,16,"%0.2e",xmax);
104: PetscStrlen(value,&len);
105: PetscDrawStringGetSize(draw,&w,NULL);
106: PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
107: PetscSNPrintf(value,16,"%0.2e",ymin);
108: PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);
109: PetscSNPrintf(value,16,"%0.2e",ymax);
110: PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);
111: }
112: }
113: PetscDrawCollectiveEnd(draw);
114: return(0);
115: }
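/*
   Editor's note (illustration of the loop above): with id = i+j*m the four corners of a
   grid cell are indexed as

       corner 4 = id+m      corner 3 = id+m+1
       corner 1 = id        corner 2 = id+1

   and each cell is drawn as the two triangles (1,2,3) and (1,3,4); the corner colors come
   from PetscDrawRealToColor(), so PetscDrawTriangle() shades the field values across the cell.
*/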
119: PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
120: {
121: DM da,dac,dag;
122: PetscErrorCode ierr;
123: PetscInt N,s,M,w,ncoors = 4;
124: const PetscInt *lx,*ly;
125: PetscReal coors[4];
126: PetscDraw draw,popup;
127: PetscBool isnull,useports = PETSC_FALSE;
128: MPI_Comm comm;
129: Vec xlocal,xcoor,xcoorl;
130: DMBoundaryType bx,by;
131: DMDAStencilType st;
132: ZoomCtx zctx;
133: PetscDrawViewPorts *ports = NULL;
134: PetscViewerFormat format;
135: PetscInt *displayfields;
136: PetscInt ndisplayfields,i,nbounds;
137: const PetscReal *bounds;
140: zctx.showgrid = PETSC_FALSE;
141: zctx.showaxis = PETSC_TRUE;
143: PetscViewerDrawGetDraw(viewer,0,&draw);
144: PetscDrawIsNull(draw,&isnull);
145: if (isnull) return(0);
147: PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);
149: VecGetDM(xin,&da);
150: if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
152: PetscObjectGetComm((PetscObject)xin,&comm);
153: MPI_Comm_rank(comm,&zctx.rank);
155: DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);
156: DMDAGetOwnershipRanges(da,&lx,&ly,NULL);
158: /*
159: Obtain a sequential vector that is going to contain the local values plus ONE layer of
160: ghosted values to draw the graphics from. We also need its corresponding DMDA (dac), which is used to
161: update the local values plus ONE layer of ghost values.
162: */
163: PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);
164: if (!xlocal) {
165: if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
166: /*
167: if the original da does not have stencil width one, is periodic, or does not use a box stencil, then
168: create a special DMDA to handle one level of ghost points for graphics
169: */
170: DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);
171: PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n");
172: } else {
173: /* otherwise we can use the da we already have */
174: dac = da;
175: }
176: /* create local vector for holding ghosted values used in graphics */
177: DMCreateLocalVector(dac,&xlocal);
178: if (dac != da) {
179: /* don't keep any public reference to this DMDA; it is only available through xlocal */
180: PetscObjectDereference((PetscObject)dac);
181: } else {
182: /* remove the association between xlocal and da, because below we compose in the opposite
183: direction and if we left this connection we'd get a reference loop, so the objects could
184: never be destroyed */
185: PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");
186: }
187: PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);
188: PetscObjectDereference((PetscObject)xlocal);
189: } else {
190: if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
191: VecGetDM(xlocal, &dac);
192: } else {
193: dac = da;
194: }
195: }
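/*
   Editor's note: the block above caches the ghosted work vector on the DMDA with the generic
   PetscObjectCompose()/PetscObjectQuery() mechanism, so it is built only once and is freed when
   the DM is destroyed. A minimal sketch of the same pattern (illustrative names, not part of
   this file):

     Vec work;
     PetscObjectQuery((PetscObject)dm,"MyCachedWork",(PetscObject*)&work);
     if (!work) {
       DMCreateLocalVector(dm,&work);
       PetscObjectCompose((PetscObject)dm,"MyCachedWork",(PetscObject)work);
       PetscObjectDereference((PetscObject)work);
     }
*/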
197: /*
198: Get local (ghosted) values of vector
199: */
200: DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);
201: DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);
202: VecGetArrayRead(xlocal,&zctx.v);
204: /*
205: Get coordinates of nodes
206: */
207: DMGetCoordinates(da,&xcoor);
208: if (!xcoor) {
209: DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
210: DMGetCoordinates(da,&xcoor);
211: }
213: /*
214: Determine the min and max coordinates in plot
215: */
216: VecStrideMin(xcoor,0,NULL,&zctx.xmin);
217: VecStrideMax(xcoor,0,NULL,&zctx.xmax);
218: VecStrideMin(xcoor,1,NULL,&zctx.ymin);
219: VecStrideMax(xcoor,1,NULL,&zctx.ymax);
220: PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL);
221: if (zctx.showaxis) {
222: coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
223: coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
224: } else {
225: coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
226: }
227: PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);
228: PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);
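/*
   Editor's note: the axis, grid and coordinate-window settings above are runtime options;
   a possible invocation (executable name illustrative, options taken from this file):

     ./app -vec_view draw -draw_contour_grid -draw_contour_axis 0 \
           -draw_coordinates 0.0,0.0,2.0,1.0

   where -draw_coordinates supplies xmin,ymin,xmax,ymax of the drawing window.
*/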
230: /*
231: Get local ghosted version of coordinates
232: */
233: PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);
234: if (!xcoorl) {
235: /* create a DMDA used to obtain the local (ghosted) version of the coordinates */
236: DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);
237: PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
238: DMCreateLocalVector(dag,&xcoorl);
239: PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);
240: PetscObjectDereference((PetscObject)dag);
241: PetscObjectDereference((PetscObject)xcoorl);
242: } else {
243: VecGetDM(xcoorl,&dag);
244: }
245: DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);
246: DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);
247: VecGetArrayRead(xcoorl,&zctx.xy);
248: DMDAGetCoordinateName(da,0,&zctx.name0);
249: DMDAGetCoordinateName(da,1,&zctx.name1);
251: /*
252: Get information about the size of the area for which each process must draw graphics
253: */
254: DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);
255: DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);
256: PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);
258: DMDASelectFields(da,&ndisplayfields,&displayfields);
259: PetscViewerGetFormat(viewer,&format);
260: PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);
261: if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
262: if (useports) {
263: PetscViewerDrawGetDraw(viewer,0,&draw);
264: PetscDrawCheckResizedWindow(draw);
265: PetscDrawClear(draw);
266: PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);
267: }
269: /*
270: Loop over each field; drawing each in a different window
271: */
272: for (i=0; i<ndisplayfields; i++) {
273: zctx.k = displayfields[i];
275: /* determine the min and max value in plot */
276: VecStrideMin(xin,zctx.k,NULL,&zctx.min);
277: VecStrideMax(xin,zctx.k,NULL,&zctx.max);
278: if (zctx.k < nbounds) {
279: zctx.min = bounds[2*zctx.k];
280: zctx.max = bounds[2*zctx.k+1];
281: }
282: if (zctx.min == zctx.max) {
283: zctx.min -= 1.e-12;
284: zctx.max += 1.e-12;
285: }
286: PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);
288: if (useports) {
289: PetscDrawViewPortsSet(ports,i);
290: } else {
291: const char *title;
292: PetscViewerDrawGetDraw(viewer,i,&draw);
293: DMDAGetFieldName(da,zctx.k,&title);
294: if (title) {PetscDrawSetTitle(draw,title);}
295: }
297: PetscDrawGetPopup(draw,&popup);
298: PetscDrawScalePopup(popup,zctx.min,zctx.max);
299: PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);
300: PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
301: if (!useports) {PetscDrawSave(draw);}
302: }
303: if (useports) {
304: PetscViewerDrawGetDraw(viewer,0,&draw);
305: PetscDrawSave(draw);
306: }
308: PetscDrawViewPortsDestroy(ports);
309: PetscFree(displayfields);
310: VecRestoreArrayRead(xcoorl,&zctx.xy);
311: VecRestoreArrayRead(xlocal,&zctx.v);
312: return(0);
313: }
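/*
   Editor's note: a minimal sketch (editor's illustration, error checking omitted) of user
   code that ends up in VecView_MPI_Draw_DA2d() via VecView_MPI_DA() below:

     DM  da;
     Vec x;
     DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                  64,64,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
     DMCreateGlobalVector(da,&x);
     ... fill x ...
     VecView(x,PETSC_VIEWER_DRAW_WORLD);
     VecDestroy(&x);
     DMDestroy(&da);
*/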
315: #if defined(PETSC_HAVE_HDF5)
318: static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
319: {
320: PetscMPIInt comm_size;
322: hsize_t chunk_size, target_size, dim;
323: hsize_t vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
324: hsize_t avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
325: hsize_t max_chunks = 64*KiB; /* HDF5 internal limitation */
326: hsize_t max_chunk_size = 4*GiB; /* HDF5 internal limitation */
327: PetscInt zslices=da->p, yslices=da->n, xslices=da->m;
330: MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
331: avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size); /* we will attempt to use this as the chunk size */
333: target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size)))));
334: /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
335: chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);
337: /*
338: if size/rank > max_chunk_size, we need radical measures: even going down to
339: avg_local_vec_size is not enough, so we simply use a chunk size of at most 4 GiB no matter
340: what, composed in the most efficient way possible.
341: N.B. this minimises the number of chunks, which may or may not be the optimal
342: solution. On a BlueGene (BG), for example, the optimal solution is probably to make # chunks = #
343: IO nodes involved, but this author has no access to a BlueGene to figure out how to
344: reliably find the right number. And even then it may or may not be enough.
345: */
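/*
   Editor's note, a worked example with numbers chosen for illustration: a 1024^3 DMDA with
   dof = 1 and 8-byte scalars gives vec_size = 8 GiB. On 4 ranks avg_local_vec_size = 2 GiB,
   below the 4 GiB HDF5 limit, so target_size evaluates to 2 GiB and only the milder branch
   further below is taken. On a single rank the 8 GiB average exceeds max_chunk_size and the
   code directly below starts splitting the z-axis into slabs.
*/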
346: if (avg_local_vec_size > max_chunk_size) {
347: /* check if we can just split local z-axis: is that enough? */
348: zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices;
349: if (zslices > da->P) {
350: /* lattice is too large in xy-directions, splitting z only is not enough */
351: zslices = da->P;
352: yslices= (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices;
353: if (yslices > da->N) {
354: /* lattice is too large in x-direction, splitting along z, y is not enough */
355: yslices = da->N;
356: xslices= (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices;
357: }
358: }
359: dim = 0;
360: if (timestep >= 0) {
361: ++dim;
362: }
363: /* prefer to split z-axis, even down to planar slices */
364: if (dimension == 3) {
365: chunkDims[dim++] = (hsize_t) da->P/zslices;
366: chunkDims[dim++] = (hsize_t) da->N/yslices;
367: chunkDims[dim++] = (hsize_t) da->M/xslices;
368: } else {
369: /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
370: chunkDims[dim++] = (hsize_t) da->N/yslices;
371: chunkDims[dim++] = (hsize_t) da->M/xslices;
372: }
373: chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
374: } else {
375: if (target_size < chunk_size) {
376: /* only change the defaults if target_size < chunk_size */
377: dim = 0;
378: if (timestep >= 0) {
379: ++dim;
380: }
381: /* prefer to split z-axis, even down to planar slices */
382: if (dimension == 3) {
383: /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
384: if (target_size >= chunk_size/da->p) {
385: /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
386: chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p);
387: } else {
388: /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
389: radical and let everyone write all they've got */
390: chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p);
391: chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
392: chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
393: }
394: } else {
395: /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
396: if (target_size >= chunk_size/da->n) {
397: /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
398: chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n);
399: } else {
400: /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
401: radical and let everyone write all they've got */
402: chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
403: chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
404: }
406: }
407: chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
408: } else {
409: /* precomputed chunks are fine, we don't need to do anything */
410: }
411: }
412: return(0);
413: }
414: #endif
416: #if defined(PETSC_HAVE_HDF5)
419: PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
420: {
421: DM dm;
422: DM_DA *da;
423: hid_t filespace; /* file dataspace identifier */
424: hid_t chunkspace; /* chunk dataset property identifier */
425: hid_t plist_id; /* property list identifier */
426: hid_t dset_id; /* dataset identifier */
427: hid_t memspace; /* memory dataspace identifier */
428: hid_t file_id;
429: hid_t group;
430: hid_t memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
431: hid_t filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
432: hsize_t dim;
433: hsize_t maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
434: PetscInt timestep, dimension;
435: const PetscScalar *x;
436: const char *vecname;
437: PetscErrorCode ierr;
438: PetscBool dim2;
439: PetscBool spoutput;
442: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
443: PetscViewerHDF5GetTimestep(viewer, &timestep);
444: PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
445: PetscViewerHDF5GetSPOutput(viewer,&spoutput);
447: VecGetDM(xin,&dm);
448: if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
449: da = (DM_DA*)dm->data;
450: DMGetDimension(dm, &dimension);
452: /* Create the dataspace for the dataset.
453: *
454: * dims - holds the current dimensions of the dataset
455: *
456: * maxDims - holds the maximum dimensions of the dataset (unlimited
457: * for the number of time steps with the current dimensions for the
458: * other dimensions; so only additional time steps can be added).
459: *
460: * chunkDims - holds the size of a single time step (required to
461: * permit extending dataset).
462: */
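/*
   Editor's note, a concrete example (editor's numbers): a 3D DMDA with M = N = P = 64,
   dof = 2, real scalars and an active timestep yields, after the code below,

     dims      = {timestep+1, 64, 64, 64, 2}
     maxDims   = {H5S_UNLIMITED, 64, 64, 64, 2}
     chunkDims = {1, 64, 64, 64, 2}      before VecGetHDF5ChunkSize() possibly shrinks it

   so the slowest-varying index is the time step and the fastest-varying one is the dof.
*/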
463: dim = 0;
464: if (timestep >= 0) {
465: dims[dim] = timestep+1;
466: maxDims[dim] = H5S_UNLIMITED;
467: chunkDims[dim] = 1;
468: ++dim;
469: }
470: if (dimension == 3) {
471: PetscHDF5IntCast(da->P,dims+dim);
472: maxDims[dim] = dims[dim];
473: chunkDims[dim] = dims[dim];
474: ++dim;
475: }
476: if (dimension > 1) {
477: PetscHDF5IntCast(da->N,dims+dim);
478: maxDims[dim] = dims[dim];
479: chunkDims[dim] = dims[dim];
480: ++dim;
481: }
482: PetscHDF5IntCast(da->M,dims+dim);
483: maxDims[dim] = dims[dim];
484: chunkDims[dim] = dims[dim];
485: ++dim;
486: if (da->w > 1 || dim2) {
487: PetscHDF5IntCast(da->w,dims+dim);
488: maxDims[dim] = dims[dim];
489: chunkDims[dim] = dims[dim];
490: ++dim;
491: }
492: #if defined(PETSC_USE_COMPLEX)
493: dims[dim] = 2;
494: maxDims[dim] = dims[dim];
495: chunkDims[dim] = dims[dim];
496: ++dim;
497: #endif
499: VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims);
501: PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));
503: #if defined(PETSC_USE_REAL_SINGLE)
504: memscalartype = H5T_NATIVE_FLOAT;
505: filescalartype = H5T_NATIVE_FLOAT;
506: #elif defined(PETSC_USE_REAL___FLOAT128)
507: #error "HDF5 output with 128 bit floats not supported."
508: #else
509: memscalartype = H5T_NATIVE_DOUBLE;
510: if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
511: else filescalartype = H5T_NATIVE_DOUBLE;
512: #endif
514: /* Create the dataset with default properties and close filespace */
515: PetscObjectGetName((PetscObject)xin,&vecname);
516: if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
517: /* Create chunk */
518: PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
519: PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));
521: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
522: PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
523: #else
524: PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
525: #endif
526: } else {
527: PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
528: PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
529: }
530: PetscStackCallHDF5(H5Sclose,(filespace));
532: /* Each process defines a dataset and writes it to the hyperslab in the file */
533: dim = 0;
534: if (timestep >= 0) {
535: offset[dim] = timestep;
536: ++dim;
537: }
538: if (dimension == 3) {PetscHDF5IntCast(da->zs,offset + dim++);}
539: if (dimension > 1) {PetscHDF5IntCast(da->ys,offset + dim++);}
540: PetscHDF5IntCast(da->xs/da->w,offset + dim++);
541: if (da->w > 1 || dim2) offset[dim++] = 0;
542: #if defined(PETSC_USE_COMPLEX)
543: offset[dim++] = 0;
544: #endif
545: dim = 0;
546: if (timestep >= 0) {
547: count[dim] = 1;
548: ++dim;
549: }
550: if (dimension == 3) {PetscHDF5IntCast(da->ze - da->zs,count + dim++);}
551: if (dimension > 1) {PetscHDF5IntCast(da->ye - da->ys,count + dim++);}
552: PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);
553: if (da->w > 1 || dim2) {PetscHDF5IntCast(da->w,count + dim++);}
554: #if defined(PETSC_USE_COMPLEX)
555: count[dim++] = 2;
556: #endif
557: PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
558: PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
559: PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
561: /* Create property list for collective dataset write */
562: PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
563: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
564: PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
565: #endif
566: /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
568: VecGetArrayRead(xin, &x);
569: PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
570: PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
571: VecRestoreArrayRead(xin, &x);
573: /* Close/release resources */
574: if (group != file_id) {
575: PetscStackCallHDF5(H5Gclose,(group));
576: }
577: PetscStackCallHDF5(H5Pclose,(plist_id));
578: PetscStackCallHDF5(H5Sclose,(filespace));
579: PetscStackCallHDF5(H5Sclose,(memspace));
580: PetscStackCallHDF5(H5Dclose,(dset_id));
581: PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
582: return(0);
583: }
584: #endif
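/*
   Editor's note: a minimal sketch (illustrative file and dataset names) of how the HDF5
   writer above is normally reached; the name set with PetscObjectSetName() becomes the
   HDF5 dataset name:

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"solution.h5",FILE_MODE_WRITE,&viewer);
     PetscObjectSetName((PetscObject)x,"pressure");
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/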
586: extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);
588: #if defined(PETSC_HAVE_MPIIO)
591: static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
592: {
593: PetscErrorCode ierr;
594: MPI_File mfdes;
595: PetscMPIInt gsizes[4],lsizes[4],lstarts[4],asiz,dof;
596: MPI_Datatype view;
597: const PetscScalar *array;
598: MPI_Offset off;
599: MPI_Aint ub,ul;
600: PetscInt type,rows,vecrows,tr[2];
601: DM_DA *dd = (DM_DA*)da->data;
602: PetscBool skipheader;
605: VecGetSize(xin,&vecrows);
606: PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
607: if (!write) {
608: /* Read vector header. */
609: if (!skipheader) {
610: PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
611: type = tr[0];
612: rows = tr[1];
613: if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
614: if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
615: }
616: } else {
617: tr[0] = VEC_FILE_CLASSID;
618: tr[1] = vecrows;
619: if (!skipheader) {
620: PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);
621: }
622: }
624: PetscMPIIntCast(dd->w,&dof);
625: gsizes[0] = dof;
626: PetscMPIIntCast(dd->M,gsizes+1);
627: PetscMPIIntCast(dd->N,gsizes+2);
628: PetscMPIIntCast(dd->P,gsizes+3);
629: lsizes[0] = dof;
630: PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);
631: PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);
632: PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);
633: lstarts[0] = 0;
634: PetscMPIIntCast(dd->xs/dof,lstarts+1);
635: PetscMPIIntCast(dd->ys,lstarts+2);
636: PetscMPIIntCast(dd->zs,lstarts+3);
637: MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
638: MPI_Type_commit(&view);
640: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
641: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
642: MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);
643: VecGetArrayRead(xin,&array);
644: asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
645: if (write) {
646: MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
647: } else {
648: MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
649: }
650: MPI_Type_get_extent(view,&ul,&ub);
651: PetscViewerBinaryAddMPIIOOffset(viewer,ub);
652: VecRestoreArrayRead(xin,&array);
653: MPI_Type_free(&view);
654: return(0);
655: }
656: #endif
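/*
   Editor's note, a worked example (editor's numbers) of the subarray view built above: for a
   2D DMDA with dof = 1, M = N = 8 on a 2x2 process grid, the rank whose subdomain starts at
   global indices (4,4) passes

     gsizes = {1, 8, 8}    lsizes = {1, 4, 4}    lstarts = {0, 4, 4}

   (Fortran ordering, dof varying fastest), so MPI-IO reads or writes that rank's 4x4 block at
   its natural-ordering position in the file without any intermediate redistribution.
*/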
660: PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
661: {
662: DM da;
663: PetscErrorCode ierr;
664: PetscInt dim;
665: Vec natural;
666: PetscBool isdraw,isvtk;
667: #if defined(PETSC_HAVE_HDF5)
668: PetscBool ishdf5;
669: #endif
670: const char *prefix,*name;
671: PetscViewerFormat format;
674: VecGetDM(xin,&da);
675: if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
676: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
677: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
678: #if defined(PETSC_HAVE_HDF5)
679: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
680: #endif
681: if (isdraw) {
682: DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);
683: if (dim == 1) {
684: VecView_MPI_Draw_DA1d(xin,viewer);
685: } else if (dim == 2) {
686: VecView_MPI_Draw_DA2d(xin,viewer);
687: } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %D",dim);
688: } else if (isvtk) { /* Duplicate the Vec and hold a reference to the DM */
689: Vec Y;
690: PetscObjectReference((PetscObject)da);
691: VecDuplicate(xin,&Y);
692: if (((PetscObject)xin)->name) {
693: /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
694: PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);
695: }
696: VecCopy(xin,Y);
697: PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);
698: #if defined(PETSC_HAVE_HDF5)
699: } else if (ishdf5) {
700: VecView_MPI_HDF5_DA(xin,viewer);
701: #endif
702: } else {
703: #if defined(PETSC_HAVE_MPIIO)
704: PetscBool isbinary,isMPIIO;
706: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
707: if (isbinary) {
708: PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
709: if (isMPIIO) {
710: DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);
711: return(0);
712: }
713: }
714: #endif
716: /* call viewer on natural ordering */
717: PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
718: DMDACreateNaturalVector(da,&natural);
719: PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
720: DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);
721: DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);
722: PetscObjectGetName((PetscObject)xin,&name);
723: PetscObjectSetName((PetscObject)natural,name);
725: PetscViewerGetFormat(viewer,&format);
726: if (format == PETSC_VIEWER_BINARY_MATLAB) {
727: /* temporarily remove viewer format so it won't trigger in the VecView() */
728: PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);
729: }
731: VecView(natural,viewer);
733: if (format == PETSC_VIEWER_BINARY_MATLAB) {
734: MPI_Comm comm;
735: FILE *info;
736: const char *fieldname;
737: char fieldbuf[256];
738: PetscInt dim,ni,nj,nk,pi,pj,pk,dof,n;
740: /* set the viewer format back into the viewer */
741: PetscViewerPopFormat(viewer);
742: PetscObjectGetComm((PetscObject)viewer,&comm);
743: PetscViewerBinaryGetInfoPointer(viewer,&info);
744: DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);
745: PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
746: PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");
747: if (dim == 1) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni); }
748: if (dim == 2) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj); }
749: if (dim == 3) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk); }
751: for (n=0; n<dof; n++) {
752: DMDAGetFieldName(da,n,&fieldname);
753: if (!fieldname || !fieldname[0]) {
754: PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);
755: fieldname = fieldbuf;
756: }
757: if (dim == 1) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1); }
758: if (dim == 2) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1); }
759: if (dim == 3) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);}
760: }
761: PetscFPrintf(comm,info,"#$$ clear tmp; \n");
762: PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
763: }
765: VecDestroy(&natural);
766: }
767: return(0);
768: }
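/*
   Editor's note: which branch of VecView_MPI_DA() runs is decided by the viewer type; some
   representative choices (file names illustrative):

     -vec_view draw                contour plots via the PetscDraw code above
     -vec_view hdf5:sol.h5         VecView_MPI_HDF5_DA()
     -vec_view binary:sol.dat      natural-ordering binary, or MPI-IO when enabled

   A VTK viewer (e.g. from PetscViewerVTKOpen()) takes the branch that duplicates the vector
   and defers the actual write to DMDAVTKWriteAll().
*/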
770: #if defined(PETSC_HAVE_HDF5)
773: PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
774: {
775: DM da;
777: hsize_t dim,rdim;
778: hsize_t dims[6]={0},count[6]={0},offset[6]={0};
779: PetscInt dimension,timestep,dofInd;
780: PetscScalar *x;
781: const char *vecname;
782: hid_t filespace; /* file dataspace identifier */
783: hid_t plist_id; /* property list identifier */
784: hid_t dset_id; /* dataset identifier */
785: hid_t memspace; /* memory dataspace identifier */
786: hid_t file_id,group;
787: hid_t scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
788: DM_DA *dd;
789: PetscBool dim2 = PETSC_FALSE;
792: #if defined(PETSC_USE_REAL_SINGLE)
793: scalartype = H5T_NATIVE_FLOAT;
794: #elif defined(PETSC_USE_REAL___FLOAT128)
795: #error "HDF5 output with 128 bit floats not supported."
796: #else
797: scalartype = H5T_NATIVE_DOUBLE;
798: #endif
800: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
801: PetscViewerHDF5GetTimestep(viewer, &timestep);
802: PetscObjectGetName((PetscObject)xin,&vecname);
803: VecGetDM(xin,&da);
804: dd = (DM_DA*)da->data;
805: DMGetDimension(da, &dimension);
807: /* Open dataset */
808: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
809: PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
810: #else
811: PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname));
812: #endif
814: /* Retrieve the dataspace for the dataset */
815: PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
816: PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
818: /* Expected dimension for holding the dof's */
819: #if defined(PETSC_USE_COMPLEX)
820: dofInd = rdim-2;
821: #else
822: dofInd = rdim-1;
823: #endif
825: /* The expected number of dimensions, assuming basedimension2 = false */
826: dim = dimension;
827: if (dd->w > 1) ++dim;
828: if (timestep >= 0) ++dim;
829: #if defined(PETSC_USE_COMPLEX)
830: ++dim;
831: #endif
833: /* In this case the input dataset has one extra, unexpected dimension. */
834: if (rdim == dim+1) {
835: /* In this case the block size is unity */
836: if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;
838: /* Special error message for the case where dof does not match the input file */
839: else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w);
841: /* Other cases where rdim != dim cannot be handled currently */
842: } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w);
844: /* Set up the hyperslab size */
845: dim = 0;
846: if (timestep >= 0) {
847: offset[dim] = timestep;
848: count[dim] = 1;
849: ++dim;
850: }
851: if (dimension == 3) {
852: PetscHDF5IntCast(dd->zs,offset + dim);
853: PetscHDF5IntCast(dd->ze - dd->zs,count + dim);
854: ++dim;
855: }
856: if (dimension > 1) {
857: PetscHDF5IntCast(dd->ys,offset + dim);
858: PetscHDF5IntCast(dd->ye - dd->ys,count + dim);
859: ++dim;
860: }
861: PetscHDF5IntCast(dd->xs/dd->w,offset + dim);
862: PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);
863: ++dim;
864: if (dd->w > 1 || dim2) {
865: offset[dim] = 0;
866: PetscHDF5IntCast(dd->w,count + dim);
867: ++dim;
868: }
869: #if defined(PETSC_USE_COMPLEX)
870: offset[dim] = 0;
871: count[dim] = 2;
872: ++dim;
873: #endif
875: /* Create the memory and filespace */
876: PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
877: PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
879: /* Create property list for collective dataset read */
880: PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
881: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
882: PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
883: #endif
884: /* To read dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
886: VecGetArray(xin, &x);
887: PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x));
888: VecRestoreArray(xin, &x);
890: /* Close/release resources */
891: if (group != file_id) {
892: PetscStackCallHDF5(H5Gclose,(group));
893: }
894: PetscStackCallHDF5(H5Pclose,(plist_id));
895: PetscStackCallHDF5(H5Sclose,(filespace));
896: PetscStackCallHDF5(H5Sclose,(memspace));
897: PetscStackCallHDF5(H5Dclose,(dset_id));
898: return(0);
899: }
900: #endif
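/*
   Editor's note: a minimal sketch (illustrative names) of reading such a dataset back; the
   Vec must come from a compatible DMDA and carry the same object name used when the file
   was written:

     PetscViewer viewer;
     Vec         x;
     DMCreateGlobalVector(da,&x);
     PetscObjectSetName((PetscObject)x,"pressure");
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"solution.h5",FILE_MODE_READ,&viewer);
     VecLoad(x,viewer);
     PetscViewerDestroy(&viewer);
*/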
904: PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
905: {
906: DM da;
908: Vec natural;
909: const char *prefix;
910: PetscInt bs;
911: PetscBool flag;
912: DM_DA *dd;
913: #if defined(PETSC_HAVE_MPIIO)
914: PetscBool isMPIIO;
915: #endif
918: VecGetDM(xin,&da);
919: dd = (DM_DA*)da->data;
920: #if defined(PETSC_HAVE_MPIIO)
921: PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
922: if (isMPIIO) {
923: DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);
924: return(0);
925: }
926: #endif
928: PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
929: DMDACreateNaturalVector(da,&natural);
930: PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);
931: PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
932: VecLoad(natural,viewer);
933: DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);
934: DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);
935: VecDestroy(&natural);
936: PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");
937: PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);
938: if (flag && bs != dd->w) {
939: PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);
940: }
941: return(0);
942: }
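/*
   Editor's note: the binary reader above mirrors the writer in VecView_MPI_DA(): the file
   always holds the vector in the natural (i,j,k) ordering, and the load scatters it back to
   the per-process PETSc ordering. A minimal round trip (illustrative file name, error
   checking omitted):

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"sol.dat",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"sol.dat",FILE_MODE_READ,&viewer);
     VecLoad(x,viewer);
     PetscViewerDestroy(&viewer);
*/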
946: PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
947: {
949: DM da;
950: PetscBool isbinary;
951: #if defined(PETSC_HAVE_HDF5)
952: PetscBool ishdf5;
953: #endif
956: VecGetDM(xin,&da);
957: if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
959: #if defined(PETSC_HAVE_HDF5)
960: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
961: #endif
962: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
964: if (isbinary) {
965: VecLoad_Binary_DA(xin,viewer);
966: #if defined(PETSC_HAVE_HDF5)
967: } else if (ishdf5) {
968: VecLoad_HDF5_DA(xin,viewer);
969: #endif
970: } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
971: return(0);
972: }