Actual source code: grvtk.c
petsc-3.8.4 2018-03-24
#include <petsc/private/dmdaimpl.h>
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
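
/* Write a .vts (VTK XML StructuredGrid) file.  Rank 0 prints the XML header and then
   streams every rank's coordinates and point data as raw appended binary blocks. */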
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscObjectGetComm((PetscObject)da,&comm);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);
  DMDAGetLocalInfo(da,&info);
  DMDAGetBoundingBox(da,gmin,gmax);
  DMGetCoordinates(da,&Coords);
  if (Coords) {
    PetscInt csize;
    VecGetSize(Coords,&csize);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
#if defined(PETSC_WORDS_BIGENDIAN)
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");
#else
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");
#endif
  PetscFPrintf(comm,fp," <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);
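
  /* Gather every rank's local extents (xs,xm,ys,ym,zs,zm) onto rank 0 so it can emit one
     <Piece> element per rank and size its receive buffers. */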
  if (!rank) {PetscMalloc1(size*6,&grloc);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
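
  /* Rank 0 writes one <Piece> per rank.  The DataArray elements use format="appended", so the
     header only records each array's byte offset into the appended block; every appended array
     is preceded by a 4-byte length, hence the "+ sizeof(int)" in the offset bookkeeping. */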
  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
#if 0
    switch (dim) {
    case 1:
      PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,0,0,0,0);
      break;
    case 2:
      PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,0,0);
      break;
    case 3:
      PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
      break;
    default: SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"No support for dimension %D",dim);
    }
#endif
    PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp," <Points>\n");
    PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp," </Points>\n");

    PetscFPrintf(comm,fp," <PointData Scalars=\"ScalarPointData\">\n");
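    /* One scalar DataArray is declared per (vector, field) pair; its name is the vector's name
       (when set) concatenated with the DMDA field name, falling back to "Unnamed<i>". */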
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        PetscObjectGetName((PetscObject)X,&vecname);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        DMDAGetFieldName(da,i,&fieldname);
        if (!fieldname) {
          PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);
          fieldname = buf;
        }
        PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp," </PointData>\n");
    PetscFPrintf(comm,fp," </Piece>\n");
  }
  PetscFPrintf(comm,fp," </StructuredGrid>\n");
  PetscFPrintf(comm,fp," <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");

  /* Now write the arrays. */
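  /* All data is funneled through rank 0: for each rank r, rank 0 either copies its own values or
     receives r's values into "array", transposes/pads them into "array2", and appends the result,
     so the binary blocks land in the same order as the <Piece> headers written above. */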
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(maxnnodes*PetscMax(3,bs),&array,maxnnodes*3,&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      VecGetArrayRead(Coords,&coords);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          PetscMemcpy(array,coords,nnodes*cdim*sizeof(PetscScalar));
        }
        /* Transpose coordinates to VTK (C-style) ordering */
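        /* VTK Points always carry 3 components, so pad with zeros when cdim < 3 */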
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(Coords,&coords);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,PETSC_SCALAR);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      VecGetArrayRead(X,&x);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return(0);
}
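
/* Write a .vtr (VTK XML RectilinearGrid) file.  The funneling strategy is the same as in the
   .vts writer above, but only the 1-D Xcoord/Ycoord/Zcoord axis arrays are stored instead of
   a full 3-component Points array. */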
static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscObjectGetComm((PetscObject)da,&comm);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);
  DMDAGetLocalInfo(da,&info);
  DMDAGetBoundingBox(da,gmin,gmax);
  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
#if defined(PETSC_WORDS_BIGENDIAN)
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");
#else
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");
#endif
  PetscFPrintf(comm,fp," <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);
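
  /* As in the .vts writer, gather every rank's local extents onto rank 0 */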
  if (!rank) {PetscMalloc1(size*6,&grloc);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp," <Coordinates>\n");
    PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp," </Coordinates>\n");
    PetscFPrintf(comm,fp," <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        PetscObjectGetName((PetscObject)X,&vecname);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        DMDAGetFieldName(da,i,&fieldname);
        if (!fieldname) {
          PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);
          fieldname = buf;
        }
        PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp," </PointData>\n");
    PetscFPrintf(comm,fp," </Piece>\n");
  }
  PetscFPrintf(comm,fp," </RectilinearGrid>\n");
  PetscFPrintf(comm,fp," <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");

  /* Now write the arrays. */
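  /* The scratch buffers must hold either a full multi-component field block (maxnnodes*bs) or
     the three 1-D coordinate axes of a local box (xm+ym+zm), whichever is larger. */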
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(PetscMax(maxnnodes*bs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*3,info.xm+info.ym+info.zm),&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
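    /* For a rectilinear grid only the axis coordinates are needed: x is taken along the first
       grid line (j=k=0), y along i=k=0, and z along i=j=0, so each rank contributes
       xm+ym+zm scalars rather than 3*nnodes. */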
    { /* Write the coordinates */
      Vec Coords;
      DMGetCoordinates(da,&Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords,&coords);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
            MPI_Get_count(&status,MPIU_SCALAR,&nn);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
        }
        VecRestoreArrayRead(Coords,&coords);
      } else {       /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (!rank) {
        PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,PETSC_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,PETSC_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,PETSC_SCALAR);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      VecGetArrayRead(X,&x);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return(0);
}

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Arguments:
   odm - DM specifying the grid layout, passed as a PetscObject
   viewer - viewer of type VTK

   Level: developer

   Note:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM        dm = (DM)odm;
  PetscBool isvtk;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    DMDAVTKWriteAll_VTS(dm,viewer);
    break;
  case PETSC_VIEWER_VTK_VTR:
    DMDAVTKWriteAll_VTR(dm,viewer);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  return(0);
}
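
Typical usage, as a minimal sketch (not part of grvtk.c): DMDAVTKWriteAll is not called directly. It is registered as the write callback when a vector obtained from a DMDA is viewed with a VTK viewer whose file name ends in .vts or .vtr, and it runs when the viewer is flushed or destroyed. The grid size, field count, vector name, and file name below are illustrative, and error checking is omitted to match the listing above.

#include <petscdmda.h>

int main(int argc,char **argv)
{
  DM          da;
  Vec         x;
  PetscViewer viewer;

  PetscInitialize(&argc,&argv,NULL,NULL);
  /* 2-D structured grid with one degree of freedom per node (sizes are illustrative) */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
               32,32,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
  DMCreateGlobalVector(da,&x);
  PetscObjectSetName((PetscObject)x,"solution");
  VecSet(x,1.0);

  /* The .vts suffix selects PETSC_VIEWER_VTK_VTS; a .vtr name would select the rectilinear writer */
  PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);
  VecView(x,viewer);                /* queues the vector on the viewer */
  PetscViewerDestroy(&viewer);      /* flushes the queue; DMDAVTKWriteAll writes the file here */

  VecDestroy(&x);
  DMDestroy(&da);
  PetscFinalize();
  return 0;
}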