Actual source code: gr2.c
petsc-3.14.6 2021-03-30
/*
     Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
     The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;
  PetscInt          m,n,dof,k;
  PetscReal         xmin,xmax,ymin,ymax,min,max;
  const PetscScalar *xy,*v;
  PetscBool         showaxis,showgrid;
  const char        *name0,*name1;
} ZoomCtx;
/*
     This does the drawing for one particular field
   in one particular set of coordinates. It is a callback
   function called from PetscDrawZoom().
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;
  /* Draw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
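      /* The four corners of quad (i,j) in the ghosted patch are the nodes with
         ids id, id+1, id+m+1, id+m, traversed counter-clockwise; each quad is
         rendered as two triangles with colors interpolated between corners. */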
      id  = i+j*m;
      x1  = PetscRealPart(xy[2*id]);
      y_1 = PetscRealPart(xy[2*id+1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id = i+j*m+1;
      x2 = PetscRealPart(xy[2*id]);
      y2 = PetscRealPart(xy[2*id+1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id = i+j*m+1+m;
      x3 = PetscRealPart(xy[2*id]);
      y3 = PetscRealPart(xy[2*id+1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);
      PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);
      if (zctx->showgrid) {
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) {PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);}
      if (zctx->name1) {PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);}
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16]; size_t len; PetscReal w;
      PetscSNPrintf(value,16,"%0.2e",xmin);
      PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
      PetscSNPrintf(value,16,"%0.2e",xmax);
      PetscStrlen(value,&len);
      PetscDrawStringGetSize(draw,&w,NULL);
      PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
      PetscSNPrintf(value,16,"%0.2e",ymin);
      PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);
      PetscSNPrintf(value,16,"%0.2e",ymax);
      PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);
    }
  }
  PetscDrawCollectiveEnd(draw);
  return(0);
}
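/*
   Usage sketch (added annotation, not part of gr2.c): a minimal driver that exercises the
   zoom callback above by viewing a 2d DMDA vector with the draw viewer. All calls are the
   public PETSc API; grid sizes and the constant field value are illustrative only.

     #include <petscdmda.h>
     int main(int argc,char **argv)
     {
       DM             da;
       Vec            x;
       PetscErrorCode ierr;

       ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
       ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,
                           32,32,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);CHKERRQ(ierr);
       ierr = DMSetUp(da);CHKERRQ(ierr);
       ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);
       ierr = VecView(x,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr); /* dispatches to VecView_MPI_Draw_DA2d() below */
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       ierr = DMDestroy(&da);CHKERRQ(ierr);
       ierr = PetscFinalize();
       return ierr;
     }
*/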
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull);
  if (isnull) return(0);

  PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  PetscObjectGetComm((PetscObject)xin,&comm);
  MPI_Comm_rank(comm,&zctx.rank);

  DMDAGetInfo(da,NULL,&M,&N,NULL,&zctx.m,&zctx.n,NULL,&w,&s,&bx,&by,NULL,&st);
  DMDAGetOwnershipRanges(da,&lx,&ly,NULL);
  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if the original da is not of stencil width one, or is periodic, or is not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);
      DMSetUp(dac);
      PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n");
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    DMCreateLocalVector(dac,&xlocal);
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      PetscObjectDereference((PetscObject)dac);
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a reference loop, so the objects
         could never be destroyed */
      PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");
    }
    PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);
    PetscObjectDereference((PetscObject)xlocal);
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      VecGetDM(xlocal, &dac);
    } else {
      dac = da;
    }
  }
  /*
     Get local (ghosted) values of vector
  */
  DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);
  VecGetArrayRead(xlocal,&zctx.v);

  /*
     Get coordinates of nodes
  */
  DMGetCoordinates(da,&xcoor);
  if (!xcoor) {
    DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
    DMGetCoordinates(da,&xcoor);
  }

  /*
     Determine the min and max coordinates in plot
  */
  VecStrideMin(xcoor,0,NULL,&zctx.xmin);
  VecStrideMax(xcoor,0,NULL,&zctx.xmax);
  VecStrideMin(xcoor,1,NULL,&zctx.ymin);
  VecStrideMax(xcoor,1,NULL,&zctx.ymax);
  PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL);
  if (zctx.showaxis) {
    coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
  }
  PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);
  PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);
  /*
     Get local ghosted version of coordinates
  */
  PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);
    DMSetUp(dag);
    PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
    DMCreateLocalVector(dag,&xcoorl);
    PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);
    PetscObjectDereference((PetscObject)dag);
    PetscObjectDereference((PetscObject)xcoorl);
  } else {
    VecGetDM(xcoorl,&dag);
  }
  DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);
  DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);
  VecGetArrayRead(xcoorl,&zctx.xy);
  DMDAGetCoordinateName(da,0,&zctx.name0);
  DMDAGetCoordinateName(da,1,&zctx.name1);
  /*
     Get information about size of area each processor must do graphics for
  */
  DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);
  DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);
  PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);

  DMDASelectFields(da,&ndisplayfields,&displayfields);
  PetscViewerGetFormat(viewer,&format);
  PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawCheckResizedWindow(draw);
    PetscDrawClear(draw);
    PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    VecStrideMin(xin,zctx.k,NULL,&zctx.min);
    VecStrideMax(xin,zctx.k,NULL,&zctx.max);
    if (zctx.k < nbounds) {
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);

    if (useports) {
      PetscDrawViewPortsSet(ports,i);
    } else {
      const char *title;
      PetscViewerDrawGetDraw(viewer,i,&draw);
      DMDAGetFieldName(da,zctx.k,&title);
      if (title) {PetscDrawSetTitle(draw,title);}
    }

    PetscDrawGetPopup(draw,&popup);
    PetscDrawScalePopup(popup,zctx.min,zctx.max);
    PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);
    PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
    if (!useports) {PetscDrawSave(draw);}
  }
  if (useports) {
    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawSave(draw);
  }

  PetscDrawViewPortsDestroy(ports);
  PetscFree(displayfields);
  VecRestoreArrayRead(xcoorl,&zctx.xy);
  VecRestoreArrayRead(xlocal,&zctx.v);
  return(0);
}
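/*
   Summary (added annotation, not part of the original file): the run-time options read by
   VecView_MPI_Draw_DA2d() above, all consumed via PetscOptionsGetBool()/PetscOptionsGetRealArray():

     -draw_contour_grid                     draw the mesh edges on top of the contour plot
     -draw_contour_axis <bool>              show/hide the coordinate limits and axis names
     -draw_coordinates xmin,ymin,xmax,ymax  override the plot bounding box
     -draw_ports                            place all fields in viewports of one window

   For example, with the driver sketched earlier:

     mpiexec -n 4 ./app -draw_contour_grid -draw_ports
*/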
#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t     avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t     max_chunks = 64*KiB;     /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4*GiB;  /* HDF5 internal limitation */
  PetscInt    zslices=da->p, yslices=da->n, xslices=da->m;

  MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
  avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size);  /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size)))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);
  /*
     if size/rank > max_chunk_size, we need radical measures: even going down to
     avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
     what, composed in the most efficient way possible.
     N.B. this minimises the number of chunks, which may or may not be the optimal
     solution. In a BG, for example, the optimal solution is probably to make # chunks = #
     IO nodes involved, but this author has no access to a BG to figure out how to
     reliably find the right number. And even then it may or may not be enough.
  */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;
    }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p);
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      }
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  return(0);
}
#endif
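/*
   Worked example (added annotation with illustrative numbers, not part of the original file):
   a 3D DMDA with M = N = P = 1024, dof = 1, and 8-byte scalars gives vec_size = 8 GiB. On 64
   ranks in an assumed 4x4x4 process grid, avg_local_vec_size = 128 MiB <= 4 GiB, so the second
   branch runs with target_size = 128 MiB. The default chunk is a whole timestep (8 GiB), and
   since 128 MiB < chunk_size/p = 2 GiB, all three axes are split down to the local sizes
   ceil(1024/4) = 256, i.e. one 256^3 block (128 MiB) per rank per chunk.
*/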
#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
  DM                dm;
  DM_DA             *da;
  hid_t             filespace;      /* file dataspace identifier */
  hid_t             chunkspace;     /* chunk dataset property identifier */
  hid_t             dset_id;        /* dataset identifier */
  hid_t             memspace;       /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt          timestep, dimension;
  const PetscScalar *x;
  const char        *vecname;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
  PetscViewerHDF5GetSPOutput(viewer,&spoutput);
  VecGetDM(xin,&dm);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  DMGetDimension(dm, &dimension);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(da->P,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(da->N,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscHDF5IntCast(da->M,dims+dim);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscHDF5IntCast(da->w,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims);

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));
#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject)xin,&vecname);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) {PetscHDF5IntCast(da->zs,offset + dim++);}
  if (dimension > 1)  {PetscHDF5IntCast(da->ys,offset + dim++);}
  PetscHDF5IntCast(da->xs/da->w,offset + dim++);
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {PetscHDF5IntCast(da->ze - da->zs,count + dim++);}
  if (dimension > 1)  {PetscHDF5IntCast(da->ye - da->ys,count + dim++);}
  PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);
  if (da->w > 1 || dim2) {PetscHDF5IntCast(da->w,count + dim++);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  VecGetArrayRead(xin, &x);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  VecRestoreArrayRead(xin, &x);

#if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
  }
#endif

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
  return(0);
}
#endif
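/*
   Usage sketch (added annotation, not part of gr2.c): writing a DMDA vector through the HDF5
   path above. The file and dataset names are arbitrary examples; the calls are the public
   PETSc API.

     PetscViewer viewer;
     ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"solution.h5",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
     ierr = PetscObjectSetName((PetscObject)x,"pressure");CHKERRQ(ierr); /* becomes the HDF5 dataset name */
     ierr = VecView(x,viewer);CHKERRQ(ierr);                             /* dispatches to VecView_MPI_HDF5_DA() */
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/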
extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  VecGetSize(xin,&vecrows);
  PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Next object in file is not a vector");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT);
    }
  }

  PetscMPIIntCast(dd->w,&dof);
  gsizes[0] = dof;
  PetscMPIIntCast(dd->M,gsizes+1);
  PetscMPIIntCast(dd->N,gsizes+2);
  PetscMPIIntCast(dd->P,gsizes+3);
  lsizes[0] = dof;
  PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);
  PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);
  PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);
  lstarts[0] = 0;
  PetscMPIIntCast(dd->xs/dof,lstarts+1);
  PetscMPIIntCast(dd->ys,lstarts+2);
  PetscMPIIntCast(dd->zs,lstarts+3);
  MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
  MPI_Type_commit(&view);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);
  VecGetArrayRead(xin,&array);
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  } else {
    MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  }
  MPI_Type_get_extent(view,&ul,&ub);
  PetscViewerBinaryAddMPIIOOffset(viewer,ub);
  VecRestoreArrayRead(xin,&array);
  MPI_Type_free(&view);
  return(0);
}
#endif
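/*
   Usage sketch (added annotation, not part of gr2.c): routing binary Vec I/O through the
   MPI-IO path above. The file name is an arbitrary example; the calls are the public PETSc API.

     PetscViewer viewer;
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"grid.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
     ierr = PetscViewerBinarySetUseMPIIO(viewer,PETSC_TRUE);CHKERRQ(ierr); /* or the -viewer_binary_mpiio option */
     ierr = VecView(x,viewer);CHKERRQ(ierr);  /* VecView_MPI_DA() below then calls DMDAArrayMPIIO() */
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/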
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk,isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
  if (isdraw) {
    DMDAGetInfo(da,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
    if (dim == 1) {
      VecView_MPI_Draw_DA1d(xin,viewer);
    } else if (dim == 2) {
      VecView_MPI_Draw_DA2d(xin,viewer);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with a DMDA of dimension %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec */
    Vec Y;
    VecDuplicate(xin,&Y);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);
    }
    VecCopy(xin,Y);
    {
      PetscObject dmvtk;
      PetscBool   compatible,compatibleSet;
      PetscViewerVTKGetDM(viewer,&dmvtk);
      if (dmvtk) {
        DMGetCompatibility(da,(DM)dmvtk,&compatible,&compatibleSet);
        if (!compatibleSet || !compatible) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_DEFAULT,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y);
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5_DA(xin,viewer);
#endif
  } else if (isglvis) {
    VecView_GLVis(xin,viewer);
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
    if (isbinary) {
      PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
      if (isMPIIO) {
        DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);
        return(0);
      }
    }
#endif
    /* call viewer on natural ordering */
    PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
    DMDACreateNaturalVector(da,&natural);
    PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
    DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);
    DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);
    PetscObjectGetName((PetscObject)xin,&name);
    PetscObjectSetName((PetscObject)natural,name);

    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    VecView(natural,viewer);
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      PetscViewerPopFormat(viewer);
      PetscObjectGetComm((PetscObject)viewer,&comm);
      PetscViewerBinaryGetInfoPointer(viewer,&info);
      DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,NULL,NULL,NULL,NULL,NULL);
      PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
      PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");
      if (dim == 1) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni); }
      if (dim == 2) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj); }
      if (dim == 3) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk); }

      for (n=0; n<dof; n++) {
        DMDAGetFieldName(da,n,&fieldname);
        if (!fieldname || !fieldname[0]) {
          PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1); }
        if (dim == 2) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1); }
        if (dim == 3) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1); }
      }
      PetscFPrintf(comm,info,"#$$ clear tmp; \n");
      PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
    }

    VecDestroy(&natural);
  }
  return(0);
}
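/*
   Usage sketch (added annotation, not part of gr2.c): producing the MATLAB loader comments
   written by the branch above. "output.dat" is an arbitrary example name; the #$$ lines land
   in the companion file output.dat.info.

     PetscViewer viewer;
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"output.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
     ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_BINARY_MATLAB);CHKERRQ(ierr);
     ierr = VecView(x,viewer);CHKERRQ(ierr);
     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/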
#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5*) viewer->data;
  DM               da;
  int              dim,rdim;
  hsize_t          dims[6]={0},count[6]={0},offset[6]={0};
  PetscInt         dimension,timestep,dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t            filespace;  /* file dataspace identifier */
  hid_t            dset_id;    /* dataset identifier */
  hid_t            memspace;   /* memory dataspace identifier */
  hid_t            file_id,group;
  hid_t            scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;
  PetscBool        dim2 = PETSC_FALSE;

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscObjectGetName((PetscObject)xin,&vecname);
  VecGetDM(xin,&da);
  dd = (DM_DA*)da->data;
  DMGetDimension(da, &dimension);
  /* Open dataset */
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));

  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));

  /* Expected dimension for holding the dof's */
#if defined(PETSC_USE_COMPLEX)
  dofInd = rdim-2;
#else
  dofInd = rdim-1;
#endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim+1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where dof does not match the input file */
    else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w);

  /* Other cases where rdim != dim cannot be handled currently */
  } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w);
  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim]  = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(dd->zs,offset + dim);
    PetscHDF5IntCast(dd->ze - dd->zs,count + dim);
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(dd->ys,offset + dim);
    PetscHDF5IntCast(dd->ye - dd->ys,count + dim);
    ++dim;
  }
  PetscHDF5IntCast(dd->xs/dd->w,offset + dim);
  PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscHDF5IntCast(dd->w,count + dim);
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim]  = 2;
  ++dim;
#endif

  /* Create the memory and filespace */
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  VecGetArray(xin, &x);
  PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  return(0);
}
#endif
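/*
   Usage sketch (added annotation, not part of gr2.c): reading a dataset back through the
   HDF5 loader above. The vector must come from a DMDA and carry the dataset name; the file
   and dataset names are arbitrary examples.

     PetscViewer viewer;
     ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"solution.h5",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = PetscObjectSetName((PetscObject)x,"pressure");CHKERRQ(ierr); /* dataset to read */
     ierr = VecLoad(x,viewer);CHKERRQ(ierr);  /* reaches VecLoad_HDF5_DA() via VecLoad_Default_DA() below */
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/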
PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM         da;
  Vec        natural;
  const char *prefix;
  PetscInt   bs;
  PetscBool  flag;
  DM_DA      *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool  isMPIIO;
#endif

  VecGetDM(xin,&da);
  dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
  if (isMPIIO) {
    DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);
    return(0);
  }
#endif

  PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
  DMDACreateNaturalVector(da,&natural);
  PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);
  PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
  VecLoad(natural,viewer);
  DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);
  DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);
  VecDestroy(&natural);
  PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");
  PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);
  if (flag && bs != dd->w) {
    PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);
  }
  return(0);
}
PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);

  if (isbinary) {
    VecLoad_Binary_DA(xin,viewer);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecLoad_HDF5_DA(xin,viewer);
#endif
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  return(0);
}
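/*
   Usage sketch (added annotation, not part of gr2.c): loading a DMDA vector from a PETSc
   binary file through the dispatcher above. "grid.dat" is an arbitrary example name.

     PetscViewer viewer;
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"grid.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = VecLoad(x,viewer);CHKERRQ(ierr);  /* reaches VecLoad_Binary_DA(), which converts from natural ordering */
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/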