Actual source code: grvtk.c

petsc-3.14.6 2021-03-30
  1: #include <petsc/private/dmdaimpl.h>
  2: /*
  3:    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
  4:    including the private vtkvimpl.h file. The code should be refactored.
  5: */
  6: #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

  8: /* Helper function that determines whether any DMDA fields are named. This is used
  9:    as a proxy for the user's intention to treat the DMDA fields as distinct
 10:    scalar-valued fields rather than as a single vector-valued field. */
 11: static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
 12: {
 14:   PetscInt       f,bs;

 17:   *fieldsnamed = PETSC_FALSE;
 18:   DMDAGetDof(da,&bs);
 19:   for (f=0; f<bs; ++f) {
 20:     const char * fieldname;
 21:     DMDAGetFieldName(da,f,&fieldname);
 22:     if (fieldname) {
 23:       *fieldsnamed = PETSC_TRUE;
 24:       break;
 25:     }
 26:   }
 27:   return(0);
 28: }
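
/*
   Illustrative sketch (not part of grvtk.c): naming the DMDA fields is what makes
   DMDAGetFieldsNamed() return PETSC_TRUE, so the writers below emit one scalar
   DataArray per component instead of a single vector-valued array.  The DMDA sizes
   and field names used here are arbitrary assumptions:

     DM da;
     DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                  64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
     DMSetUp(da);
     DMDASetFieldName(da,0,"pressure");
     DMDASetFieldName(da,1,"temperature");
*/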

 30: static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
 31: {
 32:   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
 33: #if defined(PETSC_USE_REAL_SINGLE)
 34:   const char precision[] = "Float32";
 35: #elif defined(PETSC_USE_REAL_DOUBLE)
 36:   const char precision[] = "Float64";
 37: #else
 38:   const char precision[] = "UnknownPrecision";
 39: #endif
 40:   MPI_Comm                 comm;
 41:   Vec                      Coords;
 42:   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
 43:   PetscViewerVTKObjectLink link;
 44:   FILE                     *fp;
 45:   PetscMPIInt              rank,size,tag;
 46:   DMDALocalInfo            info;
 47:   PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
 48:   PetscInt                 rloc[6],(*grloc)[6] = NULL;
 49:   PetscScalar              *array,*array2;
 50:   PetscErrorCode           ierr;

 53:   PetscObjectGetComm((PetscObject)da,&comm);
 54: #if defined(PETSC_USE_COMPLEX)
 55:   SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
 56: #endif
 57:   MPI_Comm_size(comm,&size);
 58:   MPI_Comm_rank(comm,&rank);
 59:   DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
 60:   DMDAGetLocalInfo(da,&info);
 61:   DMGetCoordinates(da,&Coords);
 62:   if (Coords) {
 63:     PetscInt csize;
 64:     VecGetSize(Coords,&csize);
 65:     if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
 66:     cdim = csize/(mx*my*mz);
 67:     if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
 68:   } else {
 69:     cdim = dim;
 70:   }
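  /* cdim is the number of coordinate components stored per node; when the DM has no
     coordinate vector, grid indices are fabricated as coordinates further below. */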

 72:   PetscFOpen(comm,vtk->filename,"wb",&fp);
 73:   PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
 74:   PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
 75:   PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

 77:   if (!rank) {PetscMalloc1(size*6,&grloc);}
 78:   rloc[0] = info.xs;
 79:   rloc[1] = info.xm;
 80:   rloc[2] = info.ys;
 81:   rloc[3] = info.ym;
 82:   rloc[4] = info.zs;
 83:   rloc[5] = info.zm;
 84:   MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
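  /* After the gather, rank 0 knows every rank's (xs,xm,ys,ym,zs,zm), so it alone can
     print each <Piece> extent below and later size the receives for that rank's data. */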

 86:   /* Write XML header */
 87:   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
 88:   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
 89:   boffset   = 0;                /* Offset into binary file */
 90:   for (r=0; r<size; r++) {
 91:     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
 92:     if (!rank) {
 93:       xs     = grloc[r][0];
 94:       xm     = grloc[r][1];
 95:       ys     = grloc[r][2];
 96:       ym     = grloc[r][3];
 97:       zs     = grloc[r][4];
 98:       zm     = grloc[r][5];
 99:       nnodes = xm*ym*zm;
100:     }
101:     maxnnodes = PetscMax(maxnnodes,nnodes);
102:     PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
103:     PetscFPrintf(comm,fp,"      <Points>\n");
104:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
105:     boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
106:     PetscFPrintf(comm,fp,"      </Points>\n");

108:     PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
109:     for (link=vtk->link; link; link=link->next) {
110:       Vec        X = (Vec)link->vec;
111:       PetscInt   bs,f;
112:       DM         daCurr;
113:       PetscBool  fieldsnamed;
114:       const char *vecname = "Unnamed";

116:       VecGetDM(X,&daCurr);
117:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
118:       maxbs = PetscMax(maxbs,bs);

120:       if (((PetscObject)X)->name || link != vtk->link) {
121:         PetscObjectGetName((PetscObject)X,&vecname);
122:       }

124:       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
125:       DMDAGetFieldsNamed(daCurr,&fieldsnamed);
126:       if (fieldsnamed) {
127:         for (f=0; f<bs; f++) {
128:           char       buf[256];
129:           const char *fieldname;
130:           DMDAGetFieldName(daCurr,f,&fieldname);
131:           if (!fieldname) {
132:             PetscSNPrintf(buf,sizeof(buf),"%D",f);
133:             fieldname = buf;
134:           }
135:           PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
136:           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
137:         }
138:       } else {
139:         PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
140:         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
141:       }
142:     }
143:     PetscFPrintf(comm,fp,"      </PointData>\n");
144:     PetscFPrintf(comm,fp,"    </Piece>\n");
145:   }
146:   PetscFPrintf(comm,fp,"  </StructuredGrid>\n");
147:   PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
148:   PetscFPrintf(comm,fp,"_");
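  /* Everything after the single "_" marker is the raw appended-data block: for each
     DataArray declared above, PetscViewerVTKFWrite() is expected to emit an int byte
     count followed by the scalars, which is why boffset advanced by sizeof(int) plus
     the payload size for every array. */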

150:   /* Now write the arrays. */
151:   tag  = ((PetscObject)viewer)->tag;
152:   PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);
153:   for (r=0; r<size; r++) {
154:     MPI_Status status;
155:     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
156:     if (!rank) {
157:       xs     = grloc[r][0];
158:       xm     = grloc[r][1];
159:       ys     = grloc[r][2];
160:       ym     = grloc[r][3];
161:       zs     = grloc[r][4];
162:       zm     = grloc[r][5];
163:       nnodes = xm*ym*zm;
164:     } else if (r == rank) {
165:       nnodes = info.xm*info.ym*info.zm;
166:     }

168:     /* Write the coordinates */
169:     if (Coords) {
170:       const PetscScalar *coords;
171:       VecGetArrayRead(Coords,&coords);
172:       if (!rank) {
173:         if (r) {
174:           PetscMPIInt nn;
175:           MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);
176:           MPI_Get_count(&status,MPIU_SCALAR,&nn);
177:           if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
178:         } else {
179:           PetscArraycpy(array,coords,nnodes*cdim);
180:         }
181:         /* Transpose coordinates to VTK (C-style) ordering */
182:         for (k=0; k<zm; k++) {
183:           for (j=0; j<ym; j++) {
184:             for (i=0; i<xm; i++) {
185:               PetscInt Iloc = i+xm*(j+ym*k);
186:               array2[Iloc*3+0] = array[Iloc*cdim + 0];
187:               array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
188:               array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
189:             }
190:           }
191:         }
192:       } else if (r == rank) {
193:         MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);
194:       }
195:       VecRestoreArrayRead(Coords,&coords);
196:     } else {       /* Fabricate some coordinates using grid index */
197:       for (k=0; k<zm; k++) {
198:         for (j=0; j<ym; j++) {
199:           for (i=0; i<xm; i++) {
200:             PetscInt Iloc = i+xm*(j+ym*k);
201:             array2[Iloc*3+0] = xs+i;
202:             array2[Iloc*3+1] = ys+j;
203:             array2[Iloc*3+2] = zs+k;
204:           }
205:         }
206:       }
207:     }
208:     PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);

210:     /* Write each of the objects queued up for this file */
211:     for (link=vtk->link; link; link=link->next) {
212:       Vec               X = (Vec)link->vec;
213:       const PetscScalar *x;
214:       PetscInt          bs,f;
215:       DM                daCurr;
216:       PetscBool         fieldsnamed;
217:       VecGetDM(X,&daCurr);
218:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL, NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
219:       VecGetArrayRead(X,&x);
220:       if (!rank) {
221:         if (r) {
222:           PetscMPIInt nn;
223:           MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
224:           MPI_Get_count(&status,MPIU_SCALAR,&nn);
225:           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
226:         } else {
227:           PetscArraycpy(array,x,nnodes*bs);
228:         }

230:         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
231:         DMDAGetFieldsNamed(daCurr,&fieldsnamed);
232:         if (fieldsnamed) {
233:           for (f=0; f<bs; f++) {
234:             /* Extract and transpose the f'th field */
235:             for (k=0; k<zm; k++) {
236:               for (j=0; j<ym; j++) {
237:                 for (i=0; i<xm; i++) {
238:                   PetscInt Iloc = i+xm*(j+ym*k);
239:                   array2[Iloc] = array[Iloc*bs + f];
240:                 }
241:               }
242:             }
243:             PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
244:           }
245:         } else {
246:           PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);
247:         }
248:       } else if (r == rank) {
249:         MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
250:       }
251:       VecRestoreArrayRead(X,&x);
252:     }
253:   }
254:   PetscFree2(array,array2);
255:   PetscFree(grloc);

257:   PetscFPrintf(comm,fp,"\n </AppendedData>\n");
258:   PetscFPrintf(comm,fp,"</VTKFile>\n");
259:   PetscFClose(comm,fp);
260:   return(0);
261: }
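
/*
   For reference, the header loop above produces a .vts skeleton of roughly the shape
   sketched here (one <Piece> per rank, all offsets pointing into the raw appended
   block); the extents, names, and offsets shown are placeholders only:

     <VTKFile type="StructuredGrid" version="0.1" byte_order="LittleEndian">
       <StructuredGrid WholeExtent="0 63 0 63 0 0">
         <Piece Extent="0 31 0 63 0 0">
           <Points>
             <DataArray type="Float64" Name="Position" NumberOfComponents="3" format="appended" offset="0" />
           </Points>
           <PointData Scalars="ScalarPointData">
             <DataArray type="Float64" Name="U.pressure" NumberOfComponents="1" format="appended" offset="..." />
           </PointData>
         </Piece>
       </StructuredGrid>
       <AppendedData encoding="raw">
       _  (binary payloads, each prefixed by its byte count)
       </AppendedData>
     </VTKFile>
*/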


264: static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
265: {
266:   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
267: #if defined(PETSC_USE_REAL_SINGLE)
268:   const char precision[] = "Float32";
269: #elif defined(PETSC_USE_REAL_DOUBLE)
270:   const char precision[] = "Float64";
271: #else
272:   const char precision[] = "UnknownPrecision";
273: #endif
274:   MPI_Comm                 comm;
275:   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
276:   PetscViewerVTKObjectLink link;
277:   FILE                     *fp;
278:   PetscMPIInt              rank,size,tag;
279:   DMDALocalInfo            info;
280:   PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
281:   PetscInt                 rloc[6],(*grloc)[6] = NULL;
282:   PetscScalar              *array,*array2;
283:   PetscErrorCode           ierr;

286:   PetscObjectGetComm((PetscObject)da,&comm);
287: #if defined(PETSC_USE_COMPLEX)
288:   SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
289: #endif
290:   MPI_Comm_size(comm,&size);
291:   MPI_Comm_rank(comm,&rank);
292:   DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
293:   DMDAGetLocalInfo(da,&info);
294:   PetscFOpen(comm,vtk->filename,"wb",&fp);
295:   PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
296:   PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
297:   PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

299:   if (!rank) {PetscMalloc1(size*6,&grloc);}
300:   rloc[0] = info.xs;
301:   rloc[1] = info.xm;
302:   rloc[2] = info.ys;
303:   rloc[3] = info.ym;
304:   rloc[4] = info.zs;
305:   rloc[5] = info.zm;
306:   MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

308:   /* Write XML header */
309:   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
310:   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
311:   boffset   = 0;                /* Offset into binary file */
312:   for (r=0; r<size; r++) {
313:     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
314:     if (!rank) {
315:       xs     = grloc[r][0];
316:       xm     = grloc[r][1];
317:       ys     = grloc[r][2];
318:       ym     = grloc[r][3];
319:       zs     = grloc[r][4];
320:       zm     = grloc[r][5];
321:       nnodes = xm*ym*zm;
322:     }
323:     maxnnodes = PetscMax(maxnnodes,nnodes);
324:     PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
325:     PetscFPrintf(comm,fp,"      <Coordinates>\n");
326:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
327:     boffset += xm*sizeof(PetscScalar) + sizeof(int);
328:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
329:     boffset += ym*sizeof(PetscScalar) + sizeof(int);
330:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
331:     boffset += zm*sizeof(PetscScalar) + sizeof(int);
332:     PetscFPrintf(comm,fp,"      </Coordinates>\n");
333:     PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
334:     for (link=vtk->link; link; link=link->next) {
335:       Vec        X = (Vec)link->vec;
336:       PetscInt   bs,f;
337:       DM         daCurr;
338:       PetscBool  fieldsnamed;
339:       const char *vecname = "Unnamed";

341:       VecGetDM(X,&daCurr);
342:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
343:       maxbs = PetscMax(maxbs,bs);
344:       if (((PetscObject)X)->name || link != vtk->link) {
345:         PetscObjectGetName((PetscObject)X,&vecname);
346:       }

348:       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
349:       DMDAGetFieldsNamed(daCurr,&fieldsnamed);
350:       if (fieldsnamed) {
351:         for (f=0; f<bs; f++) {
352:           char       buf[256];
353:           const char *fieldname;
354:           DMDAGetFieldName(daCurr,f,&fieldname);
355:           if (!fieldname) {
356:             PetscSNPrintf(buf,sizeof(buf),"%D",f);
357:             fieldname = buf;
358:           }
359:           PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
360:           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
361:         }
362:       } else {
363:         PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
364:         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
365:       }
366:     }
367:     PetscFPrintf(comm,fp,"      </PointData>\n");
368:     PetscFPrintf(comm,fp,"    </Piece>\n");
369:   }
370:   PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");
371:   PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
372:   PetscFPrintf(comm,fp,"_");

374:   /* Now write the arrays. */
375:   tag  = ((PetscObject)viewer)->tag;
376:   PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);
377:   for (r=0; r<size; r++) {
378:     MPI_Status status;
379:     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
380:     if (!rank) {
381:       xs     = grloc[r][0];
382:       xm     = grloc[r][1];
383:       ys     = grloc[r][2];
384:       ym     = grloc[r][3];
385:       zs     = grloc[r][4];
386:       zm     = grloc[r][5];
387:       nnodes = xm*ym*zm;
388:     } else if (r == rank) {
389:       nnodes = info.xm*info.ym*info.zm;
390:     }
391:     {                           /* Write the coordinates */
392:       Vec Coords;

394:       DMGetCoordinates(da,&Coords);
395:       if (Coords) {
396:         const PetscScalar *coords;
397:         VecGetArrayRead(Coords,&coords);
398:         if (!rank) {
399:           if (r) {
400:             PetscMPIInt nn;
401:             MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
402:             MPI_Get_count(&status,MPIU_SCALAR,&nn);
403:             if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
404:           } else {
405:             /* x coordinates */
406:             for (j=0, k=0, i=0; i<xm; i++) {
407:               PetscInt Iloc = i+xm*(j+ym*k);
408:               array[i] = coords[Iloc*dim + 0];
409:             }
410:             /* y coordinates */
411:             for (i=0, k=0, j=0; j<ym; j++) {
412:               PetscInt Iloc = i+xm*(j+ym*k);
413:               array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
414:             }
415:             /* z coordinates */
416:             for (i=0, j=0, k=0; k<zm; k++) {
417:               PetscInt Iloc = i+xm*(j+ym*k);
418:               array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
419:             }
420:           }
421:         } else if (r == rank) {
422:           xm = info.xm;
423:           ym = info.ym;
424:           zm = info.zm;
425:           for (j=0, k=0, i=0; i<xm; i++) {
426:             PetscInt Iloc = i+xm*(j+ym*k);
427:             array2[i] = coords[Iloc*dim + 0];
428:           }
429:           for (i=0, k=0, j=0; j<ym; j++) {
430:             PetscInt Iloc = i+xm*(j+ym*k);
431:             array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
432:           }
433:           for (i=0, j=0, k=0; k<zm; k++) {
434:             PetscInt Iloc = i+xm*(j+ym*k);
435:             array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
436:           }
437:           MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
438:         }
439:         VecRestoreArrayRead(Coords,&coords);
440:       } else {       /* Fabricate some coordinates using grid index */
441:         for (i=0; i<xm; i++) {
442:           array[i] = xs+i;
443:         }
444:         for (j=0; j<ym; j++) {
445:           array[j+xm] = ys+j;
446:         }
447:         for (k=0; k<zm; k++) {
448:           array[k+xm+ym] = zs+k;
449:         }
450:       }
451:       if (!rank) {
452:         PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);
453:         PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);
454:         PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);
455:       }
456:     }

458:     /* Write each of the objects queued up for this file */
459:     for (link=vtk->link; link; link=link->next) {
460:       Vec               X = (Vec)link->vec;
461:       const PetscScalar *x;
462:       PetscInt          bs,f;
463:       DM                daCurr;
464:       PetscBool         fieldsnamed;
465:       VecGetDM(X,&daCurr);
466:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);

468:       VecGetArrayRead(X,&x);
469:       if (!rank) {
470:         if (r) {
471:           PetscMPIInt nn;
472:           MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
473:           MPI_Get_count(&status,MPIU_SCALAR,&nn);
474:           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
475:         } else {
476:           PetscArraycpy(array,x,nnodes*bs);
477:         }
478:         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
479:         DMDAGetFieldsNamed(daCurr,&fieldsnamed);
480:         if (fieldsnamed) {
481:           for (f=0; f<bs; f++) {
482:             /* Extract and transpose the f'th field */
483:             for (k=0; k<zm; k++) {
484:               for (j=0; j<ym; j++) {
485:                 for (i=0; i<xm; i++) {
486:                   PetscInt Iloc = i+xm*(j+ym*k);
487:                   array2[Iloc] = array[Iloc*bs + f];
488:                 }
489:               }
490:             }
491:             PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
492:           }
493:         } else {
494:           PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);
495:         }

496:       } else if (r == rank) {
497:         MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
498:       }
499:       VecRestoreArrayRead(X,&x);
500:     }
501:   }
502:   PetscFree2(array,array2);
503:   PetscFree(grloc);

505:   PetscFPrintf(comm,fp,"\n </AppendedData>\n");
506:   PetscFPrintf(comm,fp,"</VTKFile>\n");
507:   PetscFClose(comm,fp);
508:   return(0);
509: }
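
/*
   Note the difference from the .vts writer above: a RectilinearGrid stores its geometry
   as three independent 1-D coordinate arrays (Xcoord/Ycoord/Zcoord of lengths xm, ym, zm),
   so each rank ships only xm+ym+zm coordinate values instead of 3*nnodes.
*/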

511: /*@C
512:    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

514:    Collective

516:    Input Arguments:
517:    odm - DM specifying the grid layout, passed as a PetscObject
518:    viewer - viewer of type VTK

520:    Level: developer

522:    Notes:
523:    This function is a callback used by the VTK viewer to actually write the file.
524:    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
525:    Instead, the metadata for the entire file must be available before any of the file can be written.

527:    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
528:    fields are written. Otherwise, a single multi-dof (vector) field is written.

530: .seealso: PETSCVIEWERVTK
531: @*/
532: PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
533: {
534:   DM             dm = (DM)odm;
535:   PetscBool      isvtk;

541:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
542:   if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
543:   switch (viewer->format) {
544:   case PETSC_VIEWER_VTK_VTS:
545:     DMDAVTKWriteAll_VTS(dm,viewer);
546:     break;
547:   case PETSC_VIEWER_VTK_VTR:
548:     DMDAVTKWriteAll_VTR(dm,viewer);
549:     break;
550:   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
551:   }
552:   return(0);
553: }
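
/*
   End-to-end usage sketch (not part of grvtk.c).  Assumptions: a ".vts" filename is
   expected to select the structured-grid path above; the DMDA sizes, field names, and
   output filename are arbitrary; error checking is omitted for brevity:

     #include <petscdmda.h>

     int main(int argc,char **argv)
     {
       DM          da;
       Vec         x;
       PetscViewer viewer;

       PetscInitialize(&argc,&argv,NULL,NULL);
       DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                    64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
       DMSetUp(da);
       DMDASetFieldName(da,0,"pressure");
       DMDASetFieldName(da,1,"temperature");
       DMCreateGlobalVector(da,&x);
       PetscObjectSetName((PetscObject)x,"U");
       VecSet(x,1.0);
       PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);
       // The VTK viewer queues the Vec; DMDAVTKWriteAll() runs when the viewer is flushed/destroyed.
       VecView(x,viewer);
       PetscViewerDestroy(&viewer);
       VecDestroy(&x);
       DMDestroy(&da);
       PetscFinalize();
       return 0;
     }
*/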