Actual source code: dagtol.c

petsc-3.9.4 2018-09-11
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>

PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /* begin the forward scatter that fills the ghost points of the local vector */
  ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /* complete the forward scatter begun in DMGlobalToLocalBegin_DA() */
  ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
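
/*
  Editor's sketch (not part of the PETSc source): typical use of the public
  DMGlobalToLocalBegin()/DMGlobalToLocalEnd() pair, which dispatches to the
  two routines above to update the ghost points of a local vector from a
  global vector. The function name and the fill value are illustrative; the
  DMDA da is assumed to have been created already, e.g. with DMDACreate2d().
*/
static PetscErrorCode ExampleGhostUpdate(DM da)
{
  PetscErrorCode ierr;
  Vec            g,l;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da,&l);CHKERRQ(ierr);
  ierr = VecSet(g,1.0);CHKERRQ(ierr);                              /* fill the global vector */
  ierr = DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);CHKERRQ(ierr); /* start the scatter */
  /* work that does not need ghost values can overlap the communication here */
  ierr = DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);CHKERRQ(ierr);   /* ghost points of l are now valid */
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}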

PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else if (mode == INSERT_VALUES) {
    if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
    if (dd->by != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
    if (dd->bz != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
    ierr = VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  PetscFunctionReturn(0);
}

PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else if (mode == INSERT_VALUES) {
    ierr = VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  PetscFunctionReturn(0);
}
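
/*
  Editor's sketch (not part of the PETSc source): the reverse direction via
  the public DMLocalToGlobalBegin()/DMLocalToGlobalEnd(), here with ADD_VALUES
  so that contributions computed in overlapping ghost regions are summed into
  the global vector, as in a finite element assembly loop. The function name
  is illustrative; l and g are assumed to come from the same DMDA.
*/
static PetscErrorCode ExampleLocalToGlobalAdd(DM da,Vec l,Vec g)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSet(g,0.0);CHKERRQ(ierr);                           /* ADD_VALUES accumulates, so zero the target first */
  ierr = DMLocalToGlobalBegin(da,l,ADD_VALUES,g);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,l,ADD_VALUES,g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}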

extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DMDAGlobalToNaturalBegin() to
     create the scatter context.

.keywords: distributed array, global to natural, create

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscErrorCode ierr;
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(dd->natural,&start,NULL);CHKERRQ(ierr);

  ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.

.keywords: distributed array, global to natural, begin

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec n)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->gton,g,n,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the global values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, global to natural, end

.seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec n)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gton,g,n,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
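
/*
  Editor's sketch (not part of the PETSc source): mapping a DMDA global vector
  into the natural (lexicographic) ordering, e.g. before writing it out in a
  process-count-independent layout. DMDACreateNaturalVector() must be called
  first, as required by DMDAGlobalToNaturalBegin(). The function name is
  illustrative.
*/
static PetscErrorCode ExampleToNatural(DM da,Vec g)
{
  PetscErrorCode ierr;
  Vec            natural;

  PetscFunctionBegin;
  ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
  ierr = DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
  ierr = DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
  /* natural now holds the values of g in natural grid ordering */
  ierr = VecDestroy(&natural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}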

/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in the natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the values in the DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, begin

.seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec n,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->gton,n,g,mode,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in the natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the global values in the PETSc DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, end

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec n,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gton,n,g,mode,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
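
/*
  Editor's sketch (not part of the PETSc source): the inverse map, scattering
  a vector given in natural ordering back into the PETSc DMDA ordering, e.g.
  after reading grid data stored in lexicographic order. The function name is
  illustrative; natural is assumed to have been created with
  DMDACreateNaturalVector() or to share its layout.
*/
static PetscErrorCode ExampleFromNatural(DM da,Vec natural,Vec g)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,g);CHKERRQ(ierr);
  ierr = DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}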