Actual source code: pbvec.c

petsc-3.3-p7 2013-05-11
/*
   This file contains routines for parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
  *z   = sum;
  return(0);
}
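
/*
   A minimal usage sketch (ExampleDotUsage is a hypothetical helper, not part of
   PETSc; error checking elided as elsewhere in this listing): callers reach the
   two routines above through the collective VecDot() and VecTDot(), each rank
   contributing its local product before the single MPI_Allreduce() combines them.
*/
static PetscErrorCode ExampleDotUsage(Vec x,Vec y)
{
  PetscScalar xdoty;

  VecDot(x,y,&xdoty);   /* collective on the communicator of x and y */
  PetscPrintf(((PetscObject)x)->comm,"dot = %g\n",(double)PetscRealPart(xdoty));
  return(0);
}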

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}
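
/*
   A minimal sketch of the user-level pairing (ExamplePlacedArray is a
   hypothetical helper; error checking elided as in the rest of this listing):
   VecPlaceArray() swaps in caller-owned storage, which VecResetArray() later
   undoes by restoring the array saved in unplacedarray above.
*/
static PetscErrorCode ExamplePlacedArray(Vec v,PetscScalar *mine)
{
  VecPlaceArray(v,mine);   /* v now operates on mine[] */
  VecSet(v,1.0);           /* writes into mine[] */
  VecResetArray(v);        /* brings back the original storage */
  return(0);
}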

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  VecCreate(((PetscObject)win)->comm,v);
  PetscLayoutReference(win->map,&(*v)->map);

  VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);
  vw   = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
  PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}
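
/*
   Sketch of the behavior from the caller's side (ExampleDuplicate is a
   hypothetical helper): VecDuplicate() reproduces the layout, ghosting, and
   composed lists of the parent, but not its numerical values.
*/
static PetscErrorCode ExampleDuplicate(Vec x)
{
  Vec y;

  VecDuplicate(x,&y);   /* same comm, layout, stashing, and local representation */
  VecCopy(x,y);         /* values must be copied explicitly */
  VecDestroy(&y);
  return(0);
}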

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            0,
            VecGetSize_MPI,
            VecGetSize_Seq,
            0,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;

  PetscNewLog(v,Vec_MPI,&s);
  v->data        = (void*)s;
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  PetscLayoutSetUp(v->map);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    PetscLogObjectMemory(v,n*sizeof(PetscScalar));
    PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
  VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  return(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/
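
/*
   A minimal creation sketch (ExampleCreateVECMPI is a hypothetical helper):
   the type can also be forced programmatically with VecSetType() instead of
   the -vec_type mpi option.
*/
static PetscErrorCode ExampleCreateVECMPI(MPI_Comm comm,PetscInt n,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,PETSC_DECIDE);
  VecSetType(*v,VECMPI);   /* ends up in VecCreate_MPI() below */
  return(0);
}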

EXTERN_C_BEGIN
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);
  return(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and a VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/
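
/*
   Sketch of the options-driven path (ExampleCreateStandard is a hypothetical
   helper): with VecSetFromOptions() the vector becomes VECSEQ or VECMPI
   depending on the communicator size, as in VecCreate_Standard() below.
*/
static PetscErrorCode ExampleCreateStandard(MPI_Comm comm,PetscInt n,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,PETSC_DECIDE);
  VecSetFromOptions(*v);   /* honors -vec_type; "standard" picks VECSEQ or VECMPI */
  return(0);
}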

EXTERN_C_BEGIN
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscMPIInt    size;

  MPI_Comm_size(((PetscObject)v)->comm,&size);
  if (size == 1) {
    VecSetType(v,VECSEQ);
  } else {
    VecSetType(v,VECMPI);
  }
  return(0);
}
EXTERN_C_END


/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecSetBlockSize(*vv,bs);
  VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);
  return(0);
}
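
/*
   A usage sketch (ExampleWithArray is a hypothetical helper): the caller owns
   the storage, so freeing it must follow, not precede, VecDestroy().
*/
static PetscErrorCode ExampleWithArray(MPI_Comm comm,PetscInt n)
{
  PetscScalar *a;
  Vec         v;

  PetscMalloc(n*sizeof(PetscScalar),&a);
  VecCreateMPIWithArray(comm,1,n,PETSC_DECIDE,a,&v);
  VecSet(v,0.0);
  VecDestroy(&v);   /* does NOT free a */
  PetscFree(a);     /* caller releases the storage afterwards */
  return(0);
}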

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (must be at least of length n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /* Create scatter context for scattering (updating) ghost values */
  ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(&to);
  ISDestroy(&from);

  /* set local to global mapping for ghosted vector */
  PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);
  VecSetLocalToGlobalMapping(*vv,ltog);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}
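
/*
   Sketch of accessing the ghosted form (ExampleLocalForm is a hypothetical
   helper): in the local representation created above, entries [0,n) are the
   owned values and entries [n,n+nghost) hold the ghost values.
*/
static PetscErrorCode ExampleLocalForm(Vec v)
{
  Vec lv;

  VecGhostGetLocalForm(v,&lv);      /* sequential view of length n+nghost */
  /* ... read or write lv with VecGetArray() etc. ... */
  VecGhostRestoreLocalForm(v,&lv);
  return(0);
}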

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}
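
/*
   A minimal end-to-end sketch (ExampleGhostUpdate is a hypothetical helper
   with made-up sizes): after filling the global vector, the two-phase ghost
   update scatters owned values into each rank's ghost region.
*/
static PetscErrorCode ExampleGhostUpdate(MPI_Comm comm)
{
  PetscInt ghosts[2] = {0,1};   /* illustrative global indices */
  Vec      v;

  VecCreateGhost(comm,4,PETSC_DECIDE,2,ghosts,&v);
  VecSet(v,2.0);
  VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
  VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
  VecDestroy(&v);
  return(0);
}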

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscBool      flg;

  PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);
  /* if this is already a fully instantiated VECMPI, destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n = vv->map->n;
    N = vv->map->N;
    (*vv->ops->destroy)(vv);
    VecSetSizes(vv,n,N);
    VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    VecGetArray(vv,&larray);
    VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);
    PetscLogObjectParent(vv,w->localrep);
    VecRestoreArray(vv,&larray);

    /* Create scatter context for scattering (updating) ghost values */
    ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);
    ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
    VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);
    PetscLogObjectParent(vv,w->localupdate);
    ISDestroy(&to);
    ISDestroy(&from);

    /* set local to global mapping for ghosted vector */
    PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
    VecGetOwnershipRange(vv,&rstart,PETSC_NULL);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);
    VecSetLocalToGlobalMapping(vv,ltog);
    ISLocalToGlobalMappingDestroy(&ltog);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  return(0);
}
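
/*
   Sketch of the required call order (ExampleSetGhost is a hypothetical
   helper): sizes and type must both be set before VecMPISetGhost(), matching
   the error branches above.
*/
static PetscErrorCode ExampleSetGhost(MPI_Comm comm,PetscInt n,PetscInt nghost,const PetscInt ghosts[])
{
  Vec v;

  VecCreate(comm,&v);
  VecSetSizes(v,n,PETSC_DECIDE);
  VecSetType(v,VECMPI);            /* must precede the ghosting call */
  VecMPISetGhost(v,nghost,ghosts);
  VecDestroy(&v);
  return(0);
}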


/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counted by block, not by individual index
-  array - the space to store the vector values (must be at least of length n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecSetBlockSize(*vv,bs);
  VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /* Create scatter context for scattering (updating) ghost values */
  ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(&to);
  ISDestroy(&from);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);
  VecSetLocalToGlobalMappingBlock(*vv,ltog);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}
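
/*
   A block-ghost sketch with user storage (ExampleBlockWithArray is a
   hypothetical helper): the array must cover the n owned entries plus bs
   entries per ghost block.
*/
static PetscErrorCode ExampleBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt nghost,const PetscInt ghosts[])
{
  PetscScalar *a;
  Vec         v;

  PetscMalloc((n+bs*nghost)*sizeof(PetscScalar),&a);   /* n + nghost*bs values */
  VecCreateGhostBlockWithArray(comm,bs,n,PETSC_DECIDE,nghost,ghosts,a,&v);
  VecDestroy(&v);   /* does not free a */
  PetscFree(a);
  return(0);
}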

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counted by block, not by individual index

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}
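
/*
   A minimal block-ghost sketch (ExampleGhostBlock is a hypothetical helper
   with made-up sizes): one ghost block of size bs contributes bs entries to
   the local form, so its length is n + bs*nghost.
*/
static PetscErrorCode ExampleGhostBlock(MPI_Comm comm)
{
  PetscInt bs = 2,ghosts[1] = {0};   /* a ghost BLOCK index, not an entry index */
  Vec      v;

  VecCreateGhostBlock(comm,bs,6,PETSC_DECIDE,1,ghosts,&v);
  VecDestroy(&v);   /* the local form would have 6 + 2*1 = 8 entries */
  return(0);
}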