Actual source code: dagtol.c

petsc-3.8.4 2018-03-24

/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>

PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);
  return(0);
}


PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);
  return(0);
}
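
/*
   Usage sketch (editor's illustration, not part of the PETSc source): the two
   routines above are the DMDA implementations behind the public
   DMGlobalToLocalBegin()/DMGlobalToLocalEnd() interface. A typical caller
   updates the ghost points of a local vector from a global vector as shown;
   ExampleGlobalToLocal is a hypothetical name, da and g are assumed to have
   been created elsewhere, and error checking is omitted to match this listing.
*/
static PetscErrorCode ExampleGlobalToLocal(DM da,Vec g)
{
  Vec l;

  DMGetLocalVector(da,&l);                     /* borrow a ghosted local vector from the DM */
  DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);  /* dispatches to DMGlobalToLocalBegin_DA() above */
  DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);    /* ghost points of l are now current */
  DMRestoreLocalVector(da,&l);
  return(0);
}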

PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (mode == ADD_VALUES) {
    VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);
  } else if (mode == INSERT_VALUES) {
    /* INSERT_VALUES with a nonzero stencil width requires each direction whose
       boundary is neither ghosted nor none to be split across more than one process */
    if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
    if (dd->by != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
    if (dd->bz != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
    VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  return(0);
}

PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (mode == ADD_VALUES) {
    VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);
  } else if (mode == INSERT_VALUES) {
    VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  return(0);
}
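
/*
   Usage sketch (editor's illustration, not part of the PETSc source): the
   common assembly pattern that drives the two routines above through the
   public DMLocalToGlobalBegin()/DMLocalToGlobalEnd() interface. ADD_VALUES
   sums contributions from overlapping ghost regions into the global vector;
   INSERT_VALUES is allowed only under the restrictions checked in
   DMLocalToGlobalBegin_DA(). ExampleLocalToGlobal is a hypothetical name, and
   zeroing g first is a design choice for this sketch, not a requirement.
*/
static PetscErrorCode ExampleLocalToGlobal(DM da,Vec l,Vec g)
{
  VecZeroEntries(g);                        /* start from zero so g holds only l's contributions */
  DMLocalToGlobalBegin(da,l,ADD_VALUES,g);  /* dispatches to DMLocalToGlobalBegin_DA() above */
  DMLocalToGlobalEnd(da,l,ADD_VALUES,g);    /* overlapping ghost values are summed into g */
  return(0);
}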

extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DMDAGlobalToNaturalBegin() and
     DMDANaturalToGlobalBegin() to create the scatter context.

.keywords: distributed array, global to natural

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  VecGetLocalSize(dd->natural,&m);
  VecGetOwnershipRange(dd->natural,&start,NULL);

  DMDAGetNatural_Private(da,&Nlocal,&to);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);
  VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,NULL,&global);
  VecScatterCreate(global,from,dd->natural,to,&dd->gton);
  VecDestroy(&global);
  ISDestroy(&from);
  ISDestroy(&to);
  return(0);
}

/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the natural ordering values

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.

.keywords: distributed array, global to natural, begin

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->gton) {
    /* create the scatter context */
    DMDAGlobalToNatural_Create(da);
  }
  VecScatterBegin(dd->gton,g,l,mode,SCATTER_FORWARD);
  return(0);
}

/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the global values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, global to natural, end

.seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gton,g,l,mode,SCATTER_FORWARD);
  return(0);
}
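
/*
   Usage sketch (editor's illustration, not part of the PETSc source):
   scattering a DMDA global vector into the natural (lexicographic) ordering,
   e.g. before writing it out in a grid-ordered format. As the notes above
   require, DMDACreateNaturalVector() is called first. ExampleGlobalToNatural
   is a hypothetical name; da and g are assumed to exist already.
*/
static PetscErrorCode ExampleGlobalToNatural(DM da,Vec g)
{
  Vec natural;

  DMDACreateNaturalVector(da,&natural);                  /* vector with the natural parallel layout */
  DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,natural);  /* creates dd->gton on first use */
  DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,natural);
  /* ... use natural, e.g. VecView(natural,viewer) ... */
  VecDestroy(&natural);
  return(0);
}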

/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the values in the DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, begin

.seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->gton) {
    /* create the scatter context */
    DMDAGlobalToNatural_Create(da);
  }
  VecScatterBegin(dd->gton,g,l,mode,SCATTER_REVERSE);
  return(0);
}

/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the global values in the PETSc DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, end

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gton,g,l,mode,SCATTER_REVERSE);
  return(0);
}
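
/*
   Usage sketch (editor's illustration, not part of the PETSc source): the
   reverse direction, loading values supplied in natural ordering (say, read
   from a file) into a global vector in the DMDA's parallel ordering. Note the
   argument order: the natural-ordering vector is the input here.
   ExampleNaturalToGlobal is a hypothetical name; all vectors are assumed to
   have been created with matching layouts.
*/
static PetscErrorCode ExampleNaturalToGlobal(DM da,Vec natural,Vec g)
{
  DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,g);  /* reuses (or lazily creates) dd->gton */
  DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,g);    /* g now holds the values in DMDA ordering */
  return(0);
}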