Actual source code: grvtk.c

petsc-3.9.4 2018-09-11
#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];

  PetscObjectGetComm((PetscObject)da,&comm);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);
  DMDAGetLocalInfo(da,&info);
  DMDAGetBoundingBox(da,gmin,gmax);
  DMGetCoordinates(da,&Coords);
  if (Coords) {
    PetscInt csize;
    VecGetSize(Coords,&csize);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
#if defined(PETSC_WORDS_BIGENDIAN)
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");
#else
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");
#endif
  PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

  if (!rank) {PetscMalloc1(size*6,&grloc);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
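  /* After this gather, rank 0 knows the corner (xs,ys,zs) and extent (xm,ym,zm)
     of every rank's subdomain, so it alone can emit the per-rank <Piece> headers
     below and size the buffers into which it later receives each rank's data.
     grloc is only allocated (and only significant) on rank 0. */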

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
#if 0
    switch (dim) {
    case 1:
      PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,0,0,0,0);
      break;
    case 2:
      PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,0,0);
      break;
    case 3:
      PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
      break;
    default: SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"No support for dimension %D",dim);
    }
#endif
    PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp,"      <Points>\n");
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"      </Points>\n");
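    /* Offset bookkeeping: in VTK's raw appended-data encoding each DataArray's
       "offset" points at a leading 4-byte byte-count header that precedes the
       raw binary block, hence the extra sizeof(int) per array. */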

    PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X        = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        PetscObjectGetName((PetscObject)X,&vecname);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        DMDAGetFieldName(da,i,&fieldname);
        if (!fieldname) {
          PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);
          fieldname = buf;
        }
        PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp,"      </PointData>\n");
    PetscFPrintf(comm,fp,"    </Piece>\n");
  }
  PetscFPrintf(comm,fp,"  </StructuredGrid>\n");
  PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");
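  /* The single underscore marks the start of the raw appended binary section in
     the VTK XML format; all offsets computed above are relative to the byte
     immediately following it. */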

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
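  /* Scratch buffers used on every rank: "array" holds one rank's raw data (up to
     PetscMax(3,bs) scalars per node), "array2" the reordered output (3 components
     per point for coordinates, or one extracted field at a time). */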
  PetscMalloc2(maxnnodes*PetscMax(3,bs),&array,maxnnodes*3,&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      VecGetArrayRead(Coords,&coords);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          PetscMemcpy(array,coords,nnodes*cdim*sizeof(PetscScalar));
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(Coords,&coords);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      VecGetArrayRead(X,&x);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return(0);
}
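/* The .vts (StructuredGrid) writer above stores an explicit 3-component
   coordinate for every grid point. The .vtr (RectilinearGrid) writer below
   stores only three 1-D coordinate arrays per piece, which is more compact but
   assumes tensor-product coordinates (e.g. those set by
   DMDASetUniformCoordinates). */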


static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];

  PetscObjectGetComm((PetscObject)da,&comm);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);
  DMDAGetLocalInfo(da,&info);
  DMDAGetBoundingBox(da,gmin,gmax);
  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
#if defined(PETSC_WORDS_BIGENDIAN)
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");
#else
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");
#endif
  PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

  if (!rank) {PetscMalloc1(size*6,&grloc);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp,"      <Coordinates>\n");
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
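    /* A rectilinear piece stores only xm+ym+zm coordinate scalars, versus
       3*nnodes explicit points in the structured-grid (.vts) format. */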
    PetscFPrintf(comm,fp,"      </Coordinates>\n");
    PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X        = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        PetscObjectGetName((PetscObject)X,&vecname);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        DMDAGetFieldName(da,i,&fieldname);
        if (!fieldname) {
          PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);
          fieldname = buf;
        }
        PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp,"      </PointData>\n");
    PetscFPrintf(comm,fp,"    </Piece>\n");
  }
  PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");
  PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
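  /* The buffers below are sized to hold either a full field block
     (maxnnodes*bs scalars) or the three concatenated axis-coordinate arrays
     (info.xm+info.ym+info.zm scalars). */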
  PetscMalloc2(PetscMax(maxnnodes*bs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*3,info.xm+info.ym+info.zm),&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    {                           /* Write the coordinates */
      Vec Coords;
      DMGetCoordinates(da,&Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords,&coords);
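        /* Recover the 1-D axis coordinates from the DMDA coordinate vector,
           which stores a full coordinate tuple per node, by sweeping one index
           at a time with the other two held at the subdomain origin. This is
           only meaningful for tensor-product coordinates. */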
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
            MPI_Get_count(&status,MPIU_SCALAR,&nn);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
        }
        VecRestoreArrayRead(Coords,&coords);
      } else {       /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (!rank) {
        PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      VecGetArrayRead(X,&x);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return(0);
}

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Arguments:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Note:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM             dm = (DM)odm;
  PetscBool      isvtk;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    DMDAVTKWriteAll_VTS(dm,viewer);
    break;
  case PETSC_VIEWER_VTK_VTR:
    DMDAVTKWriteAll_VTR(dm,viewer);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  return(0);
}
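
/* A minimal usage sketch (not part of this file; the function and file names
   are illustrative only): DMDAVTKWriteAll is not called by users directly.
   VecView() on a VTK viewer merely queues the vector, and the file is written
   when the viewer is destroyed, which is what invokes this callback. */
#if 0
static PetscErrorCode ExampleWriteVTS(DM da,Vec U)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  /* The .vts extension selects the PETSC_VIEWER_VTK_VTS format */
  ierr = PetscViewerVTKOpen(PetscObjectComm((PetscObject)da),"solution.vts",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(U,viewer);CHKERRQ(ierr);           /* queues U on the viewer */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); /* flushes the queue; calls DMDAVTKWriteAll */
  return 0;
}
#endif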