Actual source code: grvtk.c

petsc-3.10.5 2019-03-28
#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
  if (Coords) {
    PetscInt csize;
    ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
#if defined(PETSC_WORDS_BIGENDIAN)
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
#else
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
#endif
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs;
      DM         daCurr;
      const char *vecname = "Unnamed Vec data";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);

      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }
      ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
      boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*3,&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          ierr = PetscMemcpy(array,coords,nnodes*cdim*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);CHKERRQ(ierr);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs;
      DM                daCurr;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        ierr = PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);CHKERRQ(ierr);
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
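The `offset` values advertised in the XML header are byte positions into the raw `AppendedData` block, and each record costs an extra `sizeof(int)` because the raw appended encoding prefixes every DataArray with a byte count; that is what the `boffset` arithmetic above tracks. Below is a minimal stand-alone sketch of that framing, assuming a plain `double` payload and a hypothetical helper name (in the code above the actual writing is done by PetscViewerVTKFWrite):

```c
#include <stdio.h>

/* Sketch of one appended-data record: a 4-byte byte count, then the raw
   payload. This mirrors why boffset advances by n*sizeof(PetscScalar)
   plus sizeof(int) for each DataArray declared in the header. */
int WriteAppendedRecord(FILE *fp,const double *data,int n)
{
  int bytes = (int)(n*sizeof(double));               /* payload size header */
  if (fwrite(&bytes,sizeof(int),1,fp) != 1) return 1;
  if (fwrite(data,sizeof(double),(size_t)n,fp) != (size_t)n) return 1;
  return 0;
}
```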


static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
#if defined(PETSC_WORDS_BIGENDIAN)
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
#else
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
#endif
  ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs;
      DM         daCurr;
      const char *vecname = "Unnamed Vec data";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);
      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
      boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  ierr = PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,info.xm+info.ym+info.zm,&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    {                           /* Write the coordinates */
      Vec Coords;

      ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
      if (Coords) {
        const PetscScalar *coords;
        ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
            ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
        }
        ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
      } else {       /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (!rank) {
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);CHKERRQ(ierr);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs;
      DM                daCurr;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        ierr = PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);CHKERRQ(ierr);
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
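Compared with the .vts writer, the rectilinear writer stores only the three axis arrays per piece instead of a full 3-component position per node, which the `boffset` increments in the two headers make explicit. A rough stand-alone comparison, assuming an illustrative 100x100x100 single-piece grid and `PetscScalar == double` (both are assumptions for the sketch, not values from this file):

```c
#include <stdio.h>

int main(void)
{
  /* Illustrative single-piece grid; any sizes would do. */
  long xm = 100, ym = 100, zm = 100, nnodes = xm*ym*zm;
  long scalar = (long)sizeof(double);   /* stands in for sizeof(PetscScalar) */
  long hdr    = (long)sizeof(int);      /* per-record byte-count header      */

  long vts = 3*nnodes*scalar + hdr;     /* .vts "Position" DataArray         */
  long vtr = (xm+ym+zm)*scalar + 3*hdr; /* .vtr Xcoord + Ycoord + Zcoord     */
  printf("coordinate payload: vts=%ld bytes, vtr=%ld bytes\n",vts,vtr);
  return 0;
}
```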

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Arguments:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Note:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file must be available up front, before any of the file can be written.

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM             dm = (DM)odm;
  PetscBool      isvtk;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
    break;
  case PETSC_VIEWER_VTK_VTR:
    ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(0);
}
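For context, a sketch of how this callback is typically reached (a minimal example; the grid sizes and the file name "solution.vts" are arbitrary assumptions): opening a PETSCVIEWERVTK viewer and calling VecView on a DMDA vector only queues the field, and the file, including the header metadata assembled above, is written when the viewer is destroyed, which is when the viewer invokes DMDAVTKWriteAll.

```c
#include <petscdmda.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  DM             da;
  Vec            x;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,
                      DMDA_STENCIL_STAR,17,17,17,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                      1,1,NULL,NULL,NULL,&da);CHKERRQ(ierr);
  ierr = DMSetUp(da);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)x,"solution");CHKERRQ(ierr);

  /* The .vts extension selects PETSC_VIEWER_VTK_VTS; .vtr would select VTR. */
  ierr = PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);           /* queues the field */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); /* triggers the actual write */

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  return PetscFinalize();
}
```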