/*
   dagtol.c  (petsc-3.7.3, 2016-08-01)

   Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>    /*I  "petscdmda.h"  I*/
PetscErrorCode DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
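
/*
   Usage sketch (not part of the original source): DMGlobalToLocalBegin_DA()/
   DMGlobalToLocalEnd_DA() are the DMDA implementations behind the generic
   DMGlobalToLocalBegin()/DMGlobalToLocalEnd() interface. A typical caller
   refreshes ghost values as follows; "da", "xglobal" and "xlocal" are
   placeholder names.

     Vec xglobal,xlocal;
     ierr = DMCreateGlobalVector(da,&xglobal);CHKERRQ(ierr);
     ierr = DMCreateLocalVector(da,&xlocal);CHKERRQ(ierr);
     ... fill xglobal ...
     ierr = DMGlobalToLocalBegin(da,xglobal,INSERT_VALUES,xlocal);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(da,xglobal,INSERT_VALUES,xlocal);CHKERRQ(ierr);
*/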
PetscErrorCode DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    ierr = VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else if (mode == INSERT_VALUES) {
    if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
    if (dd->by != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
    if (dd->bz != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
    ierr = VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  PetscFunctionReturn(0);
}
PetscErrorCode DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    ierr = VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else if (mode == INSERT_VALUES) {
    ierr = VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  PetscFunctionReturn(0);
}
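
/*
   Usage sketch (not part of the original source): the reverse direction,
   accumulating locally computed ghost-region contributions back into the
   global vector. INSERT_VALUES is also supported, subject to the
   boundary/overlap restrictions checked in DMLocalToGlobalBegin_DA() above.

     ierr = DMLocalToGlobalBegin(da,xlocal,ADD_VALUES,xglobal);CHKERRQ(ierr);
     ierr = DMLocalToGlobalEnd(da,xlocal,ADD_VALUES,xglobal);CHKERRQ(ierr);
*/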
extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DMDAGlobalToNaturalBegin()
   (and DMDANaturalToGlobalBegin()) to create the scatter context.

.keywords: distributed array, global to natural, begin

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscErrorCode ierr;
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  ierr = VecGetLocalSize(dd->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(dd->natural,&start,NULL);CHKERRQ(ierr);

  ierr = DMDAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
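  /* 'global' holds no data of its own (the array argument above is 0/NULL); it
     only supplies the parallel layout of a PETSc-ordered global vector so that
     VecScatterCreate() below can build the PETSc-to-natural scatter, after
     which it is destroyed. */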
  ierr = VecScatterCreate(global,from,dd->natural,to,&dd->gton);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the natural ordering values

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.

.keywords: distributed array, global to natural, begin

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the global values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, global to natural, end

.seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
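
/*
   Usage sketch (not part of the original source): gathering a DMDA global
   vector into natural (i,j,k) ordering, e.g. before writing it out in a fixed
   order. "xnatural" and "xglobal" are placeholder names; the natural vector
   must be created with DMDACreateNaturalVector() first.

     Vec xnatural;
     ierr = DMDACreateNaturalVector(da,&xnatural);CHKERRQ(ierr);
     ierr = DMDAGlobalToNaturalBegin(da,xglobal,INSERT_VALUES,xnatural);CHKERRQ(ierr);
     ierr = DMDAGlobalToNaturalEnd(da,xglobal,INSERT_VALUES,xnatural);CHKERRQ(ierr);
*/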
/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the values in the DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, begin

.seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDANaturalToGlobalBegin(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    ierr = DMDAGlobalToNatural_Create(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector in a natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the global values in the PETSc DMDA ordering

   Level: intermediate

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, end

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDANaturalToGlobalEnd(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->gton,g,l,mode,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
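
/*
   Usage sketch (not part of the original source): the inverse map, scattering
   values held in natural ordering (for example, values read from a file) back
   into the PETSc DMDA ordering of a global vector; placeholder names as above.

     ierr = DMDANaturalToGlobalBegin(da,xnatural,INSERT_VALUES,xglobal);CHKERRQ(ierr);
     ierr = DMDANaturalToGlobalEnd(da,xnatural,INSERT_VALUES,xglobal);CHKERRQ(ierr);
     ierr = VecDestroy(&xnatural);CHKERRQ(ierr);
*/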