Actual source code: dagtol.c
petsc-3.3-p7 2013-05-11
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/daimpl.h>    /*I  "petscdmda.h"  I*/
PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
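/*
   Editorial usage sketch (not part of the original file): the two routines above are the
   DMDA implementations reached through the generic DMGlobalToLocalBegin()/DMGlobalToLocalEnd()
   interface. A typical caller fills the ghost points of a local vector from a global vector
   as shown below; the function and variable names here are illustrative only and the function
   is never called from this file.
*/
static PetscErrorCode ExampleGlobalToLocal(DM da)
{
  PetscErrorCode ierr;
  Vec            g,l;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);   /* vector in the PETSc (DMDA) ordering */
  ierr = DMCreateLocalVector(da,&l);CHKERRQ(ierr);    /* local vector including ghost points */
  ierr = VecSet(g,1.0);CHKERRQ(ierr);
  /* scatter owned and ghost values from the global vector into the local vector */
  ierr = DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);CHKERRQ(ierr);
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}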
PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else if (mode == INSERT_VALUES) {
    ierr = VecScatterBegin(dd->ltog,l,g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Not yet implemented");
  PetscFunctionReturn(0);
}
PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else if (mode == INSERT_VALUES) {
    ierr = VecScatterEnd(dd->ltog,l,g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Not yet implemented");
  PetscFunctionReturn(0);
}
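/*
   Editorial usage sketch (not part of the original file): the local-to-global routines above
   are reached through DMLocalToGlobalBegin()/DMLocalToGlobalEnd(). With ADD_VALUES the
   ghost-point contributions are summed into the owning process (a reverse scatter through
   dd->gtol); with INSERT_VALUES only the owned portion is copied (a forward scatter through
   dd->ltog). Names below are illustrative only and the function is never called from this file.
*/
static PetscErrorCode ExampleLocalToGlobal(DM da)
{
  PetscErrorCode ierr;
  Vec            g,l;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da,&l);CHKERRQ(ierr);
  ierr = VecSet(l,1.0);CHKERRQ(ierr);                 /* stand-in for values assembled in the ghosted local vector */
  /* sum the contributions for shared (ghost) points into the global vector */
  ierr = DMLocalToGlobalBegin(da,l,ADD_VALUES,g);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,l,ADD_VALUES,g);CHKERRQ(ierr);
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}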
extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DMDAGlobalToNaturalBegin() and
     DMDANaturalToGlobalBegin() to create the scatter context.

.keywords: distributed array, global to natural, create
.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscErrorCode ierr;
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->natural) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(dd->natural,&start,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ierr = ISCreateStride(((PetscObject)da)->comm,m,start,1,&from);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(((PetscObject)da)->comm,dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the natural ordering values

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.

.keywords: distributed array, global to natural, begin
.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the global values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, global to natural, end
.seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
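/*
   Editorial usage sketch (not part of the original file): moving a DMDA global vector into
   the natural (lexicographic grid) ordering, e.g. before writing it out in a format that
   expects the natural numbering. DMDACreateNaturalVector() must be called first, as required
   by DMDAGlobalToNatural_Create() above. Names below are illustrative only and the function
   is never called from this file.
*/
static PetscErrorCode ExampleGlobalToNatural(DM da)
{
  PetscErrorCode ierr;
  Vec            g,natural;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
  ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
  ierr = VecSet(g,2.0);CHKERRQ(ierr);
  /* permute from the PETSc (per-process block) ordering to the natural grid ordering */
  ierr = DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
  ierr = DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
  ierr = VecDestroy(&natural);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}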
/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the values in the DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, begin
.seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the global values in the PETSc DMDA ordering

   Level: intermediate

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, end
.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
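/*
   Editorial usage sketch (not part of the original file): the reverse mapping, taking a
   vector in the natural ordering (for example one read in from a file) back into the PETSc
   DMDA ordering. Names below are illustrative only and the function is never called from
   this file.
*/
static PetscErrorCode ExampleNaturalToGlobal(DM da)
{
  PetscErrorCode ierr;
  Vec            g,natural;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
  ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
  ierr = VecSet(natural,3.0);CHKERRQ(ierr);           /* stand-in for data arriving in the natural ordering */
  /* permute from the natural grid ordering back to the PETSc (DMDA) ordering */
  ierr = DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,g);CHKERRQ(ierr);
  ierr = DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,g);CHKERRQ(ierr);
  ierr = VecDestroy(&natural);CHKERRQ(ierr);
  ierr = VecDestroy(&g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}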