Actual source code: pdvec.c

petsc-3.10.5 2019-03-28

  2: /*
  3:      Code for some of the parallel vector primatives.
  4: */
  5:  #include <../src/vec/vec/impls/mpi/pvecimpl.h>
  6:  #include <petsc/private/glvisviewerimpl.h>
  7:  #include <petsc/private/glvisvecimpl.h>
  8: #include <petscviewerhdf5.h>

 10: PetscErrorCode VecDestroy_MPI(Vec v)
 11: {
 12:   Vec_MPI        *x = (Vec_MPI*)v->data;

 16: #if defined(PETSC_USE_LOG)
 17:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 18: #endif
 19:   if (!x) return(0);
 20:   PetscFree(x->array_allocated);

 22:   /* Destroy local representation of vector if it exists */
 23:   if (x->localrep) {
 24:     VecDestroy(&x->localrep);
 25:     VecScatterDestroy(&x->localupdate);
 26:   }
 27:   VecAssemblyReset_MPI(v);

 29:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 30:   VecStashDestroy_Private(&v->bstash);
 31:   VecStashDestroy_Private(&v->stash);
 32:   PetscFree(v->data);
 33:   return(0);
 34: }
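A hedged usage sketch (not part of pdvec.c): applications never call VecDestroy_MPI() directly. It is installed in the vector's function table when a VECMPI vector is created and is reached through the generic VecDestroy(). The program below is a minimal illustration; the global size 100 is an arbitrary assumption.

    #include <petscvec.h>

    int main(int argc,char **argv)
    {
      Vec            v;
      PetscErrorCode ierr;

      ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
      ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr); /* creates a VECMPI vector (Vec_MPI data) */
      ierr = VecSet(v,1.0);CHKERRQ(ierr);
      ierr = VecDestroy(&v);CHKERRQ(ierr);                                     /* dispatches to VecDestroy_MPI() above */
      ierr = PetscFinalize();
      return ierr;
    }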

 36: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 37: {
 38:   PetscErrorCode    ierr;
 39:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 40:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 41:   MPI_Status        status;
 42:   PetscScalar       *values;
 43:   const PetscScalar *xarray;
 44:   const char        *name;
 45:   PetscViewerFormat format;

 48:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
 49:   PetscViewerGetFormat(viewer,&format);
 50:   if (format == PETSC_VIEWER_LOAD_BALANCE) {
 51:     PetscInt nmax = 0,nmin = xin->map->n,navg;
 52:     for (i=0; i<(PetscInt)size; i++) {
 53:       nmax = PetscMax(nmax,xin->map->range[i+1] - xin->map->range[i]);
 54:       nmin = PetscMin(nmin,xin->map->range[i+1] - xin->map->range[i]);
 55:     }
 56:     navg = xin->map->N/size;
 57:     PetscViewerASCIIPrintf(viewer,"  Load Balance - Local vector size Min %D  avg %D  max %D\n",nmin,navg,nmax);
 58:     return(0);
 59:   }

 61:   VecGetArrayRead(xin,&xarray);
 62:   /* determine maximum message to arrive */
 63:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
 64:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
 65:   if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0, len = 0; } /* no parallel distributed write support from GLVis */
 66:   if (!rank) {
 67:     PetscMalloc1(len,&values);
 68:     /*
 69:         MATLAB format and ASCII format are very similar except
 70:         MATLAB uses %18.16e format while ASCII uses %g
 71:     */
 72:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 73:       PetscObjectGetName((PetscObject)xin,&name);
 74:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 75:       for (i=0; i<xin->map->n; i++) {
 76: #if defined(PETSC_USE_COMPLEX)
 77:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 78:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
 79:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 80:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
 81:         } else {
 82:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
 83:         }
 84: #else
 85:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
 86: #endif
 87:       }
 88:       /* receive and print messages */
 89:       for (j=1; j<size; j++) {
 90:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
 91:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 92:         for (i=0; i<n; i++) {
 93: #if defined(PETSC_USE_COMPLEX)
 94:           if (PetscImaginaryPart(values[i]) > 0.0) {
 95:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
 96:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 97:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
 98:           } else {
 99:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
100:           }
101: #else
102:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
103: #endif
104:         }
105:       }
106:       PetscViewerASCIIPrintf(viewer,"];\n");

108:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
109:       for (i=0; i<xin->map->n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
112: #else
113:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
114: #endif
115:       }
116:       /* receive and print messages */
117:       for (j=1; j<size; j++) {
118:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
119:         MPI_Get_count(&status,MPIU_SCALAR,&n);
120:         for (i=0; i<n; i++) {
121: #if defined(PETSC_USE_COMPLEX)
122:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
123: #else
124:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
125: #endif
126:         }
127:       }
128:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
129:       /*
130:         state 0: No header has been output
131:         state 1: Only POINT_DATA has been output
132:         state 2: Only CELL_DATA has been output
133:         state 3: Output both, POINT_DATA last
134:         state 4: Output both, CELL_DATA last
135:       */
136:       static PetscInt stateId     = -1;
137:       int             outputState = 0;
138:       int             doOutput    = 0;
139:       PetscBool       hasState;
140:       PetscInt        bs, b;

142:       if (stateId < 0) {
143:         PetscObjectComposedDataRegister(&stateId);
144:       }
145:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
146:       if (!hasState) outputState = 0;

148:       PetscObjectGetName((PetscObject)xin,&name);
149:       VecGetLocalSize(xin, &nLen);
150:       PetscMPIIntCast(nLen,&n);
151:       VecGetBlockSize(xin, &bs);
152:       if (format == PETSC_VIEWER_ASCII_VTK) {
153:         if (outputState == 0) {
154:           outputState = 1;
155:           doOutput    = 1;
156:         } else if (outputState == 1) doOutput = 0;
157:         else if (outputState == 2) {
158:           outputState = 3;
159:           doOutput    = 1;
160:         } else if (outputState == 3) doOutput = 0;
161:         else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

163:         if (doOutput) {
164:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
165:         }
166:       } else {
167:         if (outputState == 0) {
168:           outputState = 2;
169:           doOutput    = 1;
170:         } else if (outputState == 1) {
171:           outputState = 4;
172:           doOutput    = 1;
173:         } else if (outputState == 2) doOutput = 0;
174:         else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
175:         else if (outputState == 4) doOutput = 0;

177:         if (doOutput) {
178:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
179:         }
180:       }
181:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
182:       if (name) {
183:         if (bs == 3) {
184:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
185:         } else {
186:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
187:         }
188:       } else {
189:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
190:       }
191:       if (bs != 3) {
192:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
193:       }
194:       for (i=0; i<n/bs; i++) {
195:         for (b=0; b<bs; b++) {
196:           if (b > 0) {
197:             PetscViewerASCIIPrintf(viewer," ");
198:           }
199:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
200:         }
201:         PetscViewerASCIIPrintf(viewer,"\n");
202:       }
203:       for (j=1; j<size; j++) {
204:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
205:         MPI_Get_count(&status,MPIU_SCALAR,&n);
206:         for (i=0; i<n/bs; i++) {
207:           for (b=0; b<bs; b++) {
208:             if (b > 0) {
209:               PetscViewerASCIIPrintf(viewer," ");
210:             }
211:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
212:           }
213:           PetscViewerASCIIPrintf(viewer,"\n");
214:         }
215:       }
216:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
217:       PetscInt bs, b;

219:       VecGetLocalSize(xin, &nLen);
220:       PetscMPIIntCast(nLen,&n);
221:       VecGetBlockSize(xin, &bs);
222:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);

224:       for (i=0; i<n/bs; i++) {
225:         for (b=0; b<bs; b++) {
226:           if (b > 0) {
227:             PetscViewerASCIIPrintf(viewer," ");
228:           }
229:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
230:         }
231:         for (b=bs; b<3; b++) {
232:           PetscViewerASCIIPrintf(viewer," 0.0");
233:         }
234:         PetscViewerASCIIPrintf(viewer,"\n");
235:       }
236:       for (j=1; j<size; j++) {
237:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
238:         MPI_Get_count(&status,MPIU_SCALAR,&n);
239:         for (i=0; i<n/bs; i++) {
240:           for (b=0; b<bs; b++) {
241:             if (b > 0) {
242:               PetscViewerASCIIPrintf(viewer," ");
243:             }
244:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
245:           }
246:           for (b=bs; b<3; b++) {
247:             PetscViewerASCIIPrintf(viewer," 0.0");
248:           }
249:           PetscViewerASCIIPrintf(viewer,"\n");
250:         }
251:       }
252:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
253:       PetscInt bs, b, vertexCount = 1;

255:       VecGetLocalSize(xin, &nLen);
256:       PetscMPIIntCast(nLen,&n);
257:       VecGetBlockSize(xin, &bs);
258:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);

260:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
261:       for (i=0; i<n/bs; i++) {
262:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
263:         for (b=0; b<bs; b++) {
264:           if (b > 0) {
265:             PetscViewerASCIIPrintf(viewer," ");
266:           }
267: #if !defined(PETSC_USE_COMPLEX)
268:           PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
269: #endif
270:         }
271:         PetscViewerASCIIPrintf(viewer,"\n");
272:       }
273:       for (j=1; j<size; j++) {
274:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
275:         MPI_Get_count(&status,MPIU_SCALAR,&n);
276:         for (i=0; i<n/bs; i++) {
277:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
278:           for (b=0; b<bs; b++) {
279:             if (b > 0) {
280:               PetscViewerASCIIPrintf(viewer," ");
281:             }
282: #if !defined(PETSC_USE_COMPLEX)
283:             PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
284: #endif
285:           }
286:           PetscViewerASCIIPrintf(viewer,"\n");
287:         }
288:       }
289:     } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
290:       /* GLVis ASCII visualization/dump: this function mimicks mfem::GridFunction::Save() */
291:       const PetscScalar       *array;
292:       PetscInt                i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
293:       PetscContainer          glvis_container;
294:       PetscViewerGLVisVecInfo glvis_vec_info;
295:       PetscViewerGLVisInfo    glvis_info;
296:       PetscErrorCode          ierr;

298:       /* mfem::FiniteElementSpace::Save() */
299:       VecGetBlockSize(xin,&vdim);
300:       PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
301:       PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
302:       if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
303:       PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
304:       PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
305:       PetscViewerASCIIPrintf(viewer,"VDim: %d\n",vdim);
306:       PetscViewerASCIIPrintf(viewer,"Ordering: %d\n",ordering);
307:       PetscViewerASCIIPrintf(viewer,"\n");
308:       /* mfem::Vector::Print() */
309:       PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
310:       if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
311:       PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
312:       if (glvis_info->enabled) {
313:         VecGetLocalSize(xin,&n);
314:         VecGetArrayRead(xin,&array);
315:         for (i=0;i<n;i++) {
316:           PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
317:           PetscViewerASCIIPrintf(viewer,"\n");
318:         }
319:         VecRestoreArrayRead(xin,&array);
320:       }
321:     } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
322:       /* No info */
323:     } else {
324:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
325:       cnt = 0;
326:       for (i=0; i<xin->map->n; i++) {
327:         if (format == PETSC_VIEWER_ASCII_INDEX) {
328:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
329:         }
330: #if defined(PETSC_USE_COMPLEX)
331:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
332:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
333:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
334:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
335:         } else {
336:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
337:         }
338: #else
339:         PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
340: #endif
341:       }
342:       /* receive and print messages */
343:       for (j=1; j<size; j++) {
344:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
345:         MPI_Get_count(&status,MPIU_SCALAR,&n);
346:         if (format != PETSC_VIEWER_ASCII_COMMON) {
347:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
348:         }
349:         for (i=0; i<n; i++) {
350:           if (format == PETSC_VIEWER_ASCII_INDEX) {
351:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
352:           }
353: #if defined(PETSC_USE_COMPLEX)
354:           if (PetscImaginaryPart(values[i]) > 0.0) {
355:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
356:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
357:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
358:           } else {
359:             PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
360:           }
361: #else
362:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
363: #endif
364:         }
365:       }
366:     }
367:     PetscFree(values);
368:   } else {
369:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
370:       /* Rank 0 is not trying to receive anything, so don't send anything */
371:     } else {
372:       if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
373:         /* this may be a collective operation so make sure everyone calls it */
374:         PetscObjectGetName((PetscObject)xin,&name);
375:       }
376:       MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
377:     }
378:   }
379:   PetscViewerFlush(viewer);
380:   VecRestoreArrayRead(xin,&xarray);
381:   return(0);
382: }
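A minimal sketch of how the branches above are usually reached (assuming an assembled Vec x and PetscErrorCode ierr in scope): pushing a format on an ASCII viewer selects the corresponding case in VecView_MPI_ASCII(), here the MATLAB branch; the object name "x" is an illustrative assumption.

    PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD;                /* ASCII viewer on PETSC_COMM_WORLD */
    ierr = PetscObjectSetName((PetscObject)x,"x");CHKERRQ(ierr);   /* name printed in "x = [ ... ];" */
    ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
    ierr = VecView(x,viewer);CHKERRQ(ierr);                        /* rank 0 receives and prints every rank's values */
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);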

384: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
385: {
386:   PetscErrorCode    ierr;
387:   PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
388:   PetscInt          len,n,j,tr[2];
389:   int               fdes;
390:   MPI_Status        status;
391:   PetscScalar       *values;
392:   const PetscScalar *xarray;
393:   FILE              *file;
394: #if defined(PETSC_HAVE_MPIIO)
395:   PetscBool         isMPIIO;
396: #endif
397:   PetscBool         skipHeader;
398:   PetscInt          message_count,flowcontrolcount;
399:   PetscViewerFormat format;

402:   VecGetArrayRead(xin,&xarray);
403:   PetscViewerBinaryGetDescriptor(viewer,&fdes);
404:   PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

406:   /* determine maximum message to arrive */
407:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
408:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

410:   if (!skipHeader) {
411:     tr[0] = VEC_FILE_CLASSID;
412:     tr[1] = xin->map->N;
413:     PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
414:   }

416: #if defined(PETSC_HAVE_MPIIO)
417:   PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
418:   if (!isMPIIO) {
419: #endif
420:     PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
421:     if (!rank) {
422:       PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

424:       len = 0;
425:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
426:       PetscMalloc1(len,&values);
427:       PetscMPIIntCast(len,&mesgsize);
428:       /* receive and save messages */
429:       for (j=1; j<size; j++) {
430:         PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
431:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
432:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
433:         n    = (PetscInt)mesglen;
434:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
435:       }
436:       PetscViewerFlowControlEndMaster(viewer,&message_count);
437:       PetscFree(values);
438:     } else {
439:       PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
440:       PetscMPIIntCast(xin->map->n,&mesgsize);
441:       MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
442:       PetscViewerFlowControlEndWorker(viewer,&message_count);
443:     }
444:     PetscViewerGetFormat(viewer,&format);
445:     if (format == PETSC_VIEWER_BINARY_MATLAB) {
446:       MPI_Comm   comm;
447:       FILE       *info;
448:       const char *name;

450:       PetscObjectGetName((PetscObject)xin,&name);
451:       PetscObjectGetComm((PetscObject)viewer,&comm);
452:       PetscViewerBinaryGetInfoPointer(viewer,&info);
453:       PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
454:       PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
455:       PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
456:     }
457: #if defined(PETSC_HAVE_MPIIO)
458:   } else {
459:     MPI_Offset   off;
460:     MPI_File     mfdes;
461:     PetscMPIInt  lsize;

463:     PetscMPIIntCast(xin->map->n,&lsize);
464:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
465:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
466:     off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
467:     MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
468:     MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
469:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
470:   }
471: #endif

473:   VecRestoreArrayRead(xin,&xarray);
474:   if (!rank) {
475:     PetscViewerBinaryGetInfoPointer(viewer,&file);
476:     if (file) {
477:       if (((PetscObject)xin)->prefix) {
478:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
479:       } else {
480:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
481:       }
482:     }
483:   }
484:   return(0);
485: }
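A sketch of the usual route into VecView_MPI_Binary() (assuming Vec x and ierr in scope; the file name vec.dat is an assumption): with a viewer of type PETSCVIEWERBINARY, VecView() lands in the routine above, where rank 0 writes the header and the gathered values, or MPI-IO is used when enabled.

    PetscViewer viewer;
    ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
    ierr = VecView(x,viewer);CHKERRQ(ierr);            /* dispatched to VecView_MPI_Binary() */
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    /* the file can later be read back with VecLoad() from a viewer opened with FILE_MODE_READ */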

487:  #include <petscdraw.h>
488: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
489: {
490:   PetscDraw         draw;
491:   PetscBool         isnull;
492:   PetscDrawLG       lg;
493:   PetscMPIInt       i,size,rank,n,N,*lens = NULL,*disp = NULL;
494:   PetscReal         *values, *xx = NULL,*yy = NULL;
495:   const PetscScalar *xarray;
496:   int               colors[] = {PETSC_DRAW_RED};
497:   PetscErrorCode    ierr;

500:   PetscViewerDrawGetDraw(viewer,0,&draw);
501:   PetscDrawIsNull(draw,&isnull);
502:   if (isnull) return(0);
503:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
504:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
505:   PetscMPIIntCast(xin->map->n,&n);
506:   PetscMPIIntCast(xin->map->N,&N);

508:   VecGetArrayRead(xin,&xarray);
509: #if defined(PETSC_USE_COMPLEX)
510:   PetscMalloc1(n+1,&values);
511:   for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
512: #else
513:   values = (PetscReal*)xarray;
514: #endif
515:   if (!rank) {
516:     PetscMalloc2(N,&xx,N,&yy);
517:     for (i=0; i<N; i++) xx[i] = (PetscReal)i;
518:     PetscMalloc2(size,&lens,size,&disp);
519:     for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
520:     for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
521:   }
522:   MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
523:   PetscFree2(lens,disp);
524: #if defined(PETSC_USE_COMPLEX)
525:   PetscFree(values);
526: #endif
527:   VecRestoreArrayRead(xin,&xarray);

529:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
530:   PetscDrawLGReset(lg);
531:   PetscDrawLGSetDimension(lg,1);
532:   PetscDrawLGSetColors(lg,colors);
533:   if (!rank) {
534:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
535:     PetscFree2(xx,yy);
536:   }
537:   PetscDrawLGDraw(lg);
538:   PetscDrawLGSave(lg);
539:   return(0);
540: }

542: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
543: {
544:   PetscErrorCode    ierr;
545:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
546:   PetscInt          i,start,end;
547:   MPI_Status        status;
548:   PetscReal         min,max,tmp = 0.0;
549:   PetscDraw         draw;
550:   PetscBool         isnull;
551:   PetscDrawAxis     axis;
552:   const PetscScalar *xarray;

555:   PetscViewerDrawGetDraw(viewer,0,&draw);
556:   PetscDrawIsNull(draw,&isnull);
557:   if (isnull) return(0);
558:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
559:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);

561:   VecMin(xin,NULL,&min);
562:   VecMax(xin,NULL,&max);
563:   if (min == max) {
564:     min -= 1.e-5;
565:     max += 1.e-5;
566:   }

568:   PetscDrawCheckResizedWindow(draw);
569:   PetscDrawClear(draw);

571:   PetscDrawAxisCreate(draw,&axis);
572:   PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
573:   PetscDrawAxisDraw(axis);
574:   PetscDrawAxisDestroy(&axis);

576:   /* draw local part of vector */
577:   VecGetArrayRead(xin,&xarray);
578:   VecGetOwnershipRange(xin,&start,&end);
579:   if (rank < size-1) { /* send value to right */
580:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
581:   }
582:   if (rank) { /* receive value from right */
583:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
584:   }
585:   PetscDrawCollectiveBegin(draw);
586:   if (rank) {
587:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
588:   }
589:   for (i=1; i<xin->map->n; i++) {
590:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
591:   }
592:   PetscDrawCollectiveEnd(draw);
593:   VecRestoreArrayRead(xin,&xarray);

595:   PetscDrawFlush(draw);
596:   PetscDrawPause(draw);
597:   PetscDrawSave(draw);
598:   return(0);
599: }
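A sketch for the two draw paths above (assuming a working draw viewer, Vec x, and ierr in scope): the default format takes the line-plot route of VecView_MPI_Draw(), while pushing PETSC_VIEWER_DRAW_LG selects VecView_MPI_Draw_LG().

    ierr = PetscViewerPushFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);CHKERRQ(ierr);
    ierr = VecView(x,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);   /* rank 0 gathers all values and draws the line graph */
    ierr = PetscViewerPopFormat(PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);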

601: #if defined(PETSC_HAVE_MATLAB_ENGINE)
602: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
603: {
604:   PetscErrorCode    ierr;
605:   PetscMPIInt       rank,size,*lens;
606:   PetscInt          i,N = xin->map->N;
607:   const PetscScalar *xarray;
608:   PetscScalar       *xx;

611:   VecGetArrayRead(xin,&xarray);
612:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
613:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
614:   if (!rank) {
615:     PetscMalloc1(N,&xx);
616:     PetscMalloc1(size,&lens);
617:     for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

619:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
620:     PetscFree(lens);

622:     PetscObjectName((PetscObject)xin);
623:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

625:     PetscFree(xx);
626:   } else {
627:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
628:   }
629:   VecRestoreArrayRead(xin,&xarray);
630:   return(0);
631: }
632: #endif

634: #if defined(PETSC_HAVE_ADIOS)
635: #include <adios.h>
636: #include <adios_read.h>
637:  #include <petsc/private/vieweradiosimpl.h>
638:  #include <petsc/private/viewerimpl.h>

640: PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
641: {
642:   PetscViewer_ADIOS *adios = (PetscViewer_ADIOS*)viewer->data;
643:   PetscErrorCode    ierr;
644:   const char        *vecname;
645:   int64_t           id;
646:   PetscInt          n,N,rstart;
647:   const PetscScalar *array;
648:   char              nglobalname[16],nlocalname[16],coffset[16];

651:   PetscObjectGetName((PetscObject) xin, &vecname);

653:   VecGetLocalSize(xin,&n);
654:   VecGetSize(xin,&N);
655:   VecGetOwnershipRange(xin,&rstart,NULL);

657:   sprintf(nlocalname,"%d",(int)n);
658:   sprintf(nglobalname,"%d",(int)N);
659:   sprintf(coffset,"%d",(int)rstart);
660:   id   = adios_define_var(Petsc_adios_group,vecname,"",adios_double,nlocalname,nglobalname,coffset);
661:   VecGetArrayRead(xin,&array);
662:   adios_write_byid(adios->adios_handle,id,array);
663:   VecRestoreArrayRead(xin,&array);

665:   return(0);
666: }
667: #endif

669: #if defined(PETSC_HAVE_ADIOS2)
670: #include <adios2_c.h>
671: #include <petsc/private/vieweradios2impl.h>
672:  #include <petsc/private/viewerimpl.h>

674: PetscErrorCode VecView_MPI_ADIOS2(Vec xin, PetscViewer viewer)
675: {
676:   PetscErrorCode     ierr;
677:   PetscViewer_ADIOS2 *adios2 = (PetscViewer_ADIOS2*)viewer->data;
678:   PetscInt           n,N,rstart;
679:   const char         *vecname;
680:   const PetscScalar  *array;

683:   PetscObjectGetName((PetscObject) xin, &vecname);
684:   VecGetLocalSize(xin,&n);
685:   VecGetSize(xin,&N);
686:   VecGetOwnershipRange(xin,&rstart,NULL);

688:   VecGetArrayRead(xin,&array);
689:   VecRestoreArrayRead(xin,&array);
690:   return(0);
691: }
692: #endif

694: #if defined(PETSC_HAVE_HDF5)
695: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
696: {
697:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
698:   hid_t             filespace; /* file dataspace identifier */
699:   hid_t             chunkspace; /* chunk dataset property identifier */
700:   hid_t             plist_id;  /* property list identifier */
701:   hid_t             dset_id;   /* dataset identifier */
702:   hid_t             memspace;  /* memory dataspace identifier */
703:   hid_t             file_id;
704:   hid_t             group;
705:   hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
706:   hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
707:   PetscInt          bs = PetscAbs(xin->map->bs);
708:   hsize_t           dim;
709:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
710:   PetscInt          timestep;
711:   PetscInt          low;
712:   PetscInt          chunksize;
713:   const PetscScalar *x;
714:   const char        *vecname;
715:   PetscErrorCode    ierr;
716:   PetscBool         dim2;
717:   PetscBool         spoutput;

720:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
721:   PetscViewerHDF5GetTimestep(viewer, &timestep);
722:   PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
723:   PetscViewerHDF5GetSPOutput(viewer,&spoutput);

725:   /* Create the dataspace for the dataset.
726:    *
727:    * dims - holds the current dimensions of the dataset
728:    *
729:    * maxDims - holds the maximum dimensions of the dataset (unlimited
730:    * for the number of time steps with the current dimensions for the
731:    * other dimensions; so only additional time steps can be added).
732:    *
733:    * chunkDims - holds the size of a single time step (required to
734:    * permit extending dataset).
735:    */
736:   dim = 0;
737:   chunksize = 1;
738:   if (timestep >= 0) {
739:     dims[dim]      = timestep+1;
740:     maxDims[dim]   = H5S_UNLIMITED;
741:     chunkDims[dim] = 1;
742:     ++dim;
743:   }
744:   PetscHDF5IntCast(xin->map->N/bs,dims + dim);

746:   maxDims[dim]   = dims[dim];
747:   chunkDims[dim] = dims[dim];
748:   chunksize      *= chunkDims[dim];
749:   ++dim;
750:   if (bs > 1 || dim2) {
751:     dims[dim]      = bs;
752:     maxDims[dim]   = dims[dim];
753:     chunkDims[dim] = dims[dim];
754:     chunksize      *= chunkDims[dim];
755:     ++dim;
756:   }
757: #if defined(PETSC_USE_COMPLEX)
758:   dims[dim]      = 2;
759:   maxDims[dim]   = dims[dim];
760:   chunkDims[dim] = dims[dim];
761:   chunksize      *= chunkDims[dim];
762:   /* hdf5 chunks must be less than the max of 32 bit int */
763:   if (chunksize > PETSC_HDF5_INT_MAX/64 ) {
764:     if (bs > 1 || dim2) {
765:       if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
766:         chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
767:       } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
768:         chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
769:       }
770:     } else {
771:       chunkDims[dim-1] = PETSC_HDF5_INT_MAX/128;
772:     }
773:   }
774:   ++dim;
775: #else 
776:   /* hdf5 chunks must be less than the max of 32 bit int */
777:   if (chunksize > PETSC_HDF5_INT_MAX/64) {
778:     if (bs > 1 || dim2) {
779:       if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
780:         chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
781:       } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
782:         chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
783:       }
784:     } else {
785:       chunkDims[dim-1] = PETSC_HDF5_INT_MAX/64;
786:     }
787:   }
788: #endif


791:   PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

793: #if defined(PETSC_USE_REAL_SINGLE)
794:   memscalartype = H5T_NATIVE_FLOAT;
795:   filescalartype = H5T_NATIVE_FLOAT;
796: #elif defined(PETSC_USE_REAL___FLOAT128)
797: #error "HDF5 output with 128 bit floats not supported."
798: #elif defined(PETSC_USE_REAL___FP16)
799: #error "HDF5 output with 16 bit floats not supported."
800: #else
801:   memscalartype = H5T_NATIVE_DOUBLE;
802:   if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
803:   else filescalartype = H5T_NATIVE_DOUBLE;
804: #endif

806:   /* Create the dataset with default properties and close filespace */
807:   PetscObjectGetName((PetscObject) xin, &vecname);
808:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
809:     /* Create chunk */
810:     PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
811:     PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

813: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
814:     PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
815: #else
816:     PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
817: #endif
818:     PetscStackCallHDF5(H5Pclose,(chunkspace));
819:   } else {
820:     PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
821:     PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
822:   }
823:   PetscStackCallHDF5(H5Sclose,(filespace));

825:   /* Each process defines a dataset and writes it to the hyperslab in the file */
826:   dim = 0;
827:   if (timestep >= 0) {
828:     count[dim] = 1;
829:     ++dim;
830:   }
831:   PetscHDF5IntCast(xin->map->n/bs,count + dim);
832:   ++dim;
833:   if (bs > 1 || dim2) {
834:     count[dim] = bs;
835:     ++dim;
836:   }
837: #if defined(PETSC_USE_COMPLEX)
838:   count[dim] = 2;
839:   ++dim;
840: #endif
841:   if (xin->map->n > 0) {
842:     PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
843:   } else {
844:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
845:     PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
846:   }

848:   /* Select hyperslab in the file */
849:   VecGetOwnershipRange(xin, &low, NULL);
850:   dim  = 0;
851:   if (timestep >= 0) {
852:     offset[dim] = timestep;
853:     ++dim;
854:   }
855:   PetscHDF5IntCast(low/bs,offset + dim);
856:   ++dim;
857:   if (bs > 1 || dim2) {
858:     offset[dim] = 0;
859:     ++dim;
860:   }
861: #if defined(PETSC_USE_COMPLEX)
862:   offset[dim] = 0;
863:   ++dim;
864: #endif
865:   if (xin->map->n > 0) {
866:     PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
867:     PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
868:   } else {
869:     /* Create null filespace to match null memspace. */
870:     PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
871:   }

873:   /* Create property list for collective dataset write */
874:   PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
875: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
876:   PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
877: #endif
878:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

880:   VecGetArrayRead(xin, &x);
881:   PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
882:   PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
883:   VecRestoreArrayRead(xin, &x);

885: #if defined(PETSC_USE_COMPLEX)
886:   {
887:     PetscInt   one = 1;
888:     const char *groupname;
889:     char       vecgroup[PETSC_MAX_PATH_LEN];

891:     PetscViewerHDF5GetGroup(viewer,&groupname);
892:     PetscSNPrintf(vecgroup,PETSC_MAX_PATH_LEN,"%s/%s",groupname ? groupname : "",vecname);
893:     PetscViewerHDF5WriteAttribute(viewer,vecgroup,"complex",PETSC_INT,&one);
894:   }
895: #endif
896:   /* Close/release resources */
897:   if (group != file_id) {
898:     PetscStackCallHDF5(H5Gclose,(group));
899:   }
900:   PetscStackCallHDF5(H5Pclose,(plist_id));
901:   PetscStackCallHDF5(H5Sclose,(filespace));
902:   PetscStackCallHDF5(H5Sclose,(memspace));
903:   PetscStackCallHDF5(H5Dclose,(dset_id));
904:   PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
905:   return(0);
906: }
907: #endif
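A sketch of driving VecView_MPI_HDF5() above (assuming PETSc was configured with HDF5, with Vec x and ierr in scope; the file name vec.h5 and dataset name "solution" are illustrative assumptions):

    #if defined(PETSC_HAVE_HDF5)
      PetscViewer viewer;
      ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"vec.h5",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)x,"solution");CHKERRQ(ierr);   /* becomes the HDF5 dataset name */
      ierr = VecView(x,viewer);CHKERRQ(ierr);                               /* collective hyperslab write of each local block */
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    #endif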

909: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
910: {
912:   PetscBool      iascii,isbinary,isdraw;
913: #if defined(PETSC_HAVE_MATHEMATICA)
914:   PetscBool      ismathematica;
915: #endif
916: #if defined(PETSC_HAVE_HDF5)
917:   PetscBool      ishdf5;
918: #endif
919: #if defined(PETSC_HAVE_MATLAB_ENGINE)
920:   PetscBool      ismatlab;
921: #endif
922: #if defined(PETSC_HAVE_ADIOS)
923:   PetscBool      isadios;
924: #endif
925: #if defined(PETSC_HAVE_ADIOS2)
926:   PetscBool      isadios2;
927: #endif
928:   PetscBool      isglvis;

931:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
932:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
933:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
934: #if defined(PETSC_HAVE_MATHEMATICA)
935:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
936: #endif
937: #if defined(PETSC_HAVE_HDF5)
938:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
939: #endif
940: #if defined(PETSC_HAVE_MATLAB_ENGINE)
941:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
942: #endif
943:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
944: #if defined(PETSC_HAVE_ADIOS)
945:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS,&isadios);
946: #endif
947: #if defined(PETSC_HAVE_ADIOS2)
948:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS2,&isadios2);
949: #endif
950:   if (iascii) {
951:     VecView_MPI_ASCII(xin,viewer);
952:   } else if (isbinary) {
953:     VecView_MPI_Binary(xin,viewer);
954:   } else if (isdraw) {
955:     PetscViewerFormat format;
956:     PetscViewerGetFormat(viewer,&format);
957:     if (format == PETSC_VIEWER_DRAW_LG) {
958:       VecView_MPI_Draw_LG(xin,viewer);
959:     } else {
960:       VecView_MPI_Draw(xin,viewer);
961:     }
962: #if defined(PETSC_HAVE_MATHEMATICA)
963:   } else if (ismathematica) {
964:     PetscViewerMathematicaPutVector(viewer,xin);
965: #endif
966: #if defined(PETSC_HAVE_HDF5)
967:   } else if (ishdf5) {
968:     VecView_MPI_HDF5(xin,viewer);
969: #endif
970: #if defined(PETSC_HAVE_ADIOS)
971:   } else if (isadios) {
972:     VecView_MPI_ADIOS(xin,viewer);
973: #endif
974: #if defined(PETSC_HAVE_ADIOS2)
975:   } else if (isadios2) {
976:     VecView_MPI_ADIOS2(xin,viewer);
977: #endif
978: #if defined(PETSC_HAVE_MATLAB_ENGINE)
979:   } else if (ismatlab) {
980:     VecView_MPI_Matlab(xin,viewer);
981: #endif
982:   } else if (isglvis) {
983:     VecView_GLVis(xin,viewer);
984:   }
985:   return(0);
986: }
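A sketch of reaching the dispatcher above through the options database (the option name -my_vec_view is an illustrative assumption); the viewer type chosen on the command line then selects one of the branches of VecView_MPI().

    /* run with e.g. -my_vec_view, -my_vec_view binary:vec.dat, or -my_vec_view draw */
    ierr = VecViewFromOptions(x,NULL,"-my_vec_view");CHKERRQ(ierr);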

988: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
989: {
991:   *N = xin->map->N;
992:   return(0);
993: }

995: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
996: {
997:   const PetscScalar *xx;
998:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
999:   PetscErrorCode    ierr;

1002:   VecGetArrayRead(xin,&xx);
1003:   for (i=0; i<ni; i++) {
1004:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1005:     tmp = ix[i] - start;
1006: #if defined(PETSC_USE_DEBUG)
1007:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
1008: #endif
1009:     y[i] = xx[tmp];
1010:   }
1011:   VecRestoreArrayRead(xin,&xx);
1012:   return(0);
1013: }
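A sketch, assuming an assembled Vec x and ierr in scope: VecGetValues_MPI() only returns entries owned by the calling rank, so indices should be taken from the local ownership range.

    PetscInt    rstart,rend;
    PetscScalar value;
    ierr = VecGetOwnershipRange(x,&rstart,&rend);CHKERRQ(ierr);
    ierr = VecGetValues(x,1,&rstart,&value);CHKERRQ(ierr);   /* first locally owned entry; a non-local index errors in debug mode */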

1015: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
1016: {
1018:   PetscMPIInt    rank    = xin->stash.rank;
1019:   PetscInt       *owners = xin->map->range,start = owners[rank];
1020:   PetscInt       end     = owners[rank+1],i,row;
1021:   PetscScalar    *xx;

1024: #if defined(PETSC_USE_DEBUG)
1025:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1026:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1027: #endif
1028:   VecGetArray(xin,&xx);
1029:   xin->stash.insertmode = addv;

1031:   if (addv == INSERT_VALUES) {
1032:     for (i=0; i<ni; i++) {
1033:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1034: #if defined(PETSC_USE_DEBUG)
1035:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
1036: #endif
1037:       if ((row = ix[i]) >= start && row < end) {
1038:         xx[row-start] = y[i];
1039:       } else if (!xin->stash.donotstash) {
1040: #if defined(PETSC_USE_DEBUG)
1041:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1042: #endif
1043:         VecStashValue_Private(&xin->stash,row,y[i]);
1044:       }
1045:     }
1046:   } else {
1047:     for (i=0; i<ni; i++) {
1048:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1049: #if defined(PETSC_USE_DEBUG)
1050:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
1051: #endif
1052:       if ((row = ix[i]) >= start && row < end) {
1053:         xx[row-start] += y[i];
1054:       } else if (!xin->stash.donotstash) {
1055: #if defined(PETSC_USE_DEBUG)
1056:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1057: #endif
1058:         VecStashValue_Private(&xin->stash,row,y[i]);
1059:       }
1060:     }
1061:   }
1062:   VecRestoreArray(xin,&xx);
1063:   return(0);
1064: }
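A sketch of the intended calling sequence (assuming Vec x and ierr in scope): entries owned by this rank are written straight into the local array, off-process entries go into the stash and are communicated by the assembly routines further below; mixing INSERT_VALUES and ADD_VALUES between assemblies is rejected by the debug checks above.

    PetscInt    idx = 0;                                              /* global index; may belong to another rank */
    PetscScalar val = 3.0;
    ierr = VecSetValues(x,1,&idx,&val,INSERT_VALUES);CHKERRQ(ierr);   /* local write or stash */
    ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(x);CHKERRQ(ierr);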

1066: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
1067: {
1068:   PetscMPIInt    rank    = xin->stash.rank;
1069:   PetscInt       *owners = xin->map->range,start = owners[rank];
1071:   PetscInt       end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
1072:   PetscScalar    *xx,*y = (PetscScalar*)yin;

1075:   VecGetArray(xin,&xx);
1076: #if defined(PETSC_USE_DEBUG)
1077:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1078:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1079: #endif
1080:   xin->stash.insertmode = addv;

1082:   if (addv == INSERT_VALUES) {
1083:     for (i=0; i<ni; i++) {
1084:       if ((row = bs*ix[i]) >= start && row < end) {
1085:         for (j=0; j<bs; j++) xx[row-start+j] = y[j];
1086:       } else if (!xin->stash.donotstash) {
1087:         if (ix[i] < 0) { y += bs; continue; }
1088: #if defined(PETSC_USE_DEBUG)
1089:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1090: #endif
1091:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1092:       }
1093:       y += bs;
1094:     }
1095:   } else {
1096:     for (i=0; i<ni; i++) {
1097:       if ((row = bs*ix[i]) >= start && row < end) {
1098:         for (j=0; j<bs; j++) xx[row-start+j] += y[j];
1099:       } else if (!xin->stash.donotstash) {
1100:         if (ix[i] < 0) { y += bs; continue; }
1101: #if defined(PETSC_USE_DEBUG)
1102:         if (ix[i] > xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1103: #endif
1104:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1105:       }
1106:       y += bs;
1107:     }
1108:   }
1109:   VecRestoreArray(xin,&xx);
1110:   return(0);
1111: }
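A sketch, assuming x was created with block size 2 (for example via VecSetBlockSize() before the vector was set up) and ierr in scope: VecSetValuesBlocked() addresses whole blocks, so one block index covers bs consecutive entries.

    PetscInt    bidx    = 0;                  /* block index 0: vector entries 0 and 1 */
    PetscScalar bval[2] = {1.0,2.0};
    ierr = VecSetValuesBlocked(x,1,&bidx,bval,ADD_VALUES);CHKERRQ(ierr);
    ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(x);CHKERRQ(ierr);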

1113: /*
1114:    Since nsends or nreceives may be zero, we add 1 in certain mallocs
1115: to make sure we never malloc a zero-length array.
1116: */
1117: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1118: {
1120:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1121:   PetscMPIInt    size;
1122:   InsertMode     addv;
1123:   MPI_Comm       comm;

1126:   PetscObjectGetComm((PetscObject)xin,&comm);
1127:   if (xin->stash.donotstash) return(0);

1129:   MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
1130:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1131:   xin->stash.insertmode = addv; /* in case this processor had no cache */
1132:   xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

1134:   VecGetBlockSize(xin,&bs);
1135:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
1136:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1137:     PetscMalloc1(size+1,&bowners);
1138:     for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
1139:     xin->bstash.bowners = bowners;
1140:   } else bowners = xin->bstash.bowners;

1142:   VecStashScatterBegin_Private(&xin->stash,owners);
1143:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1144:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1145:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1146:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1147:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1148:   return(0);
1149: }

1151: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1152: {
1154:   PetscInt       base,i,j,*row,flg,bs;
1155:   PetscMPIInt    n;
1156:   PetscScalar    *val,*vv,*array,*xarray;

1159:   if (!vec->stash.donotstash) {
1160:     VecGetArray(vec,&xarray);
1161:     VecGetBlockSize(vec,&bs);
1162:     base = vec->map->range[vec->stash.rank];

1164:     /* Process the stash */
1165:     while (1) {
1166:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1167:       if (!flg) break;
1168:       if (vec->stash.insertmode == ADD_VALUES) {
1169:         for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1170:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1171:         for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1172:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1173:     }
1174:     VecStashScatterEnd_Private(&vec->stash);

1176:     /* now process the block-stash */
1177:     while (1) {
1178:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1179:       if (!flg) break;
1180:       for (i=0; i<n; i++) {
1181:         array = xarray+row[i]*bs-base;
1182:         vv    = val+i*bs;
1183:         if (vec->stash.insertmode == ADD_VALUES) {
1184:           for (j=0; j<bs; j++) array[j] += vv[j];
1185:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1186:           for (j=0; j<bs; j++) array[j] = vv[j];
1187:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1188:       }
1189:     }
1190:     VecStashScatterEnd_Private(&vec->bstash);
1191:     VecRestoreArray(vec,&xarray);
1192:   }
1193:   vec->stash.insertmode = NOT_SET_VALUES;
1194:   return(0);
1195: }
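A sketch emphasizing that the assembly pair is collective (rank, N, x, and ierr are assumed from the calling context): every rank must call both routines, even ranks that stashed nothing, because the stash scatter involves all processes.

    if (!rank) {                                   /* only rank 0 contributes, possibly to an off-process entry */
      PetscInt    last = N-1;
      PetscScalar one  = 1.0;
      ierr = VecSetValues(x,1,&last,&one,ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin(x);CHKERRQ(ierr);      /* all ranks: scatter stashed entries */
    ierr = VecAssemblyEnd(x);CHKERRQ(ierr);        /* all ranks: apply entries received from other ranks */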