Actual source code: dagtol.c

/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>
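
/*
   The four routines below provide the DMDA implementations of the
   DMGlobalToLocalBegin()/DMGlobalToLocalEnd() and
   DMLocalToGlobalBegin()/DMLocalToGlobalEnd() operations; they all
   scatter through the DMDA's precomputed global-to-local context dd->gtol.
*/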

PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);
  return(0);
}

PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);
  return(0);
}

PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

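  /* ADD_VALUES accumulates ghost-point contributions into the global vector via a
     full reverse scatter; INSERT_VALUES copies back only the locally owned portion,
     so the purely local part of the reverse scatter suffices. */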
  if (mode == ADD_VALUES) {
    VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);
  } else if (mode == INSERT_VALUES) {
    if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
    if (dd->by != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
    if (dd->bz != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
    VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  return(0);
}

PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (mode == ADD_VALUES) {
    VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);
  } else if (mode == INSERT_VALUES) {
    VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  return(0);
}

extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on da

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes:
    This is an internal routine called by DMDAGlobalToNaturalBegin() and
    DMDANaturalToGlobalBegin() to create the scatter context.

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  VecGetLocalSize(dd->natural,&m);
  VecGetOwnershipRange(dd->natural,&start,NULL);

  DMDAGetNatural_Private(da,&Nlocal,&to);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);
  VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,NULL,&global);
  VecScatterCreate(global,from,dd->natural,to,&dd->gton);
  VecDestroy(&global);
  ISDestroy(&from);
  ISDestroy(&to);
  return(0);
}

/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the natural ordering values

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.
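
   Example usage (a sketch, with error checking omitted; assumes a DMDA da has
   already been created):
.vb
   Vec g,nat;
   DMCreateGlobalVector(da,&g);
   DMDACreateNaturalVector(da,&nat);
   /* ... fill g in the PETSc (DMDA) ordering ... */
   DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,nat);
   DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,nat);
   /* nat now holds the same values in the natural ordering */
   VecDestroy(&nat);
   VecDestroy(&g);
.ve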

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec n)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->gton) {
    /* create the scatter context */
    DMDAGlobalToNatural_Create(da);
  }
  VecScatterBegin(dd->gton,g,n,mode,SCATTER_FORWARD);
  return(0);
}

/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the global values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec n)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gton,g,n,mode,SCATTER_FORWARD);
  return(0);
}

/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the values in the DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.
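
   Example usage (a sketch, with error checking omitted; assumes a DMDA da and a
   vector nat in the natural ordering, for example created with DMDACreateNaturalVector()):
.vb
   Vec g;
   DMCreateGlobalVector(da,&g);
   DMDANaturalToGlobalBegin(da,nat,INSERT_VALUES,g);
   DMDANaturalToGlobalEnd(da,nat,INSERT_VALUES,g);
   /* g now holds the values redistributed into the PETSc (DMDA) ordering */
   VecDestroy(&g);
.ve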

.seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec n,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->gton) {
    /* create the scatter context */
    DMDAGlobalToNatural_Create(da);
  }
  VecScatterBegin(dd->gton,n,g,mode,SCATTER_REVERSE);
  return(0);
}

/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the global values in the PETSc DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec n,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gton,n,g,mode,SCATTER_REVERSE);
  return(0);
}