/*
   Code for manipulating distributed regular arrays in parallel.
*/
#include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
/*
   DMLocalToLocalCreate_DA - Creates the local to local scatter

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

*/
PetscErrorCode DMLocalToLocalCreate_DA(DM da)
{
  PetscErrorCode ierr;
  PetscInt       *idx,left,j,count,up,down,i,bottom,top,k;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (dd->ltol) PetscFunctionReturn(0);
  /*
     We simply remap the values in the "from" part of the
     global-to-local scatter so that it reads from an array with the
     ghost values rather than from the plain (non-ghosted) array.
  */
  ierr = VecScatterCopy(dd->gtol,&dd->ltol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)dd->ltol);CHKERRQ(ierr);
  if (dd->dim == 1) {
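    /* 1d: owned point j sits at offset left + j in the ghosted local array,
       where left is the number of ghost points before the owned range */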
    left = dd->xs - dd->Xs;
    ierr = PetscMalloc1((dd->xe-dd->xs),&idx);CHKERRQ(ierr);
    for (j=0; j<dd->xe-dd->xs; j++) idx[j] = left + j;
  } else if (dd->dim == 2) {
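    /* 2d: the ghosted local array is stored row by row with row width Xe-Xs;
       the owned block occupies ghosted rows down..up-1 and each row begins at
       column offset left, so owned entry j of row i maps to left + i*(Xe-Xs) + j */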
    left = dd->xs - dd->Xs; down = dd->ys - dd->Ys; up = down + dd->ye-dd->ys;
    ierr = PetscMalloc1((dd->xe-dd->xs)*(up - down),&idx);CHKERRQ(ierr);
    count = 0;
    for (i=down; i<up; i++) {
      for (j=0; j<dd->xe-dd->xs; j++) {
        idx[count++] = left + i*(dd->Xe-dd->Xs) + j;
      }
    }
  } else if (dd->dim == 3) {
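    /* 3d: the same layout one dimension up; each ghosted plane i holds
       (Xe-Xs)*(Ye-Ys) entries and each ghosted row j has width Xe-Xs, so owned
       entry k of row j in plane i maps to left + j*(Xe-Xs) + i*(Xe-Xs)*(Ye-Ys) + k */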
    left   = dd->xs - dd->Xs;
    bottom = dd->ys - dd->Ys; top = bottom + dd->ye-dd->ys;
    down   = dd->zs - dd->Zs; up  = down + dd->ze-dd->zs;
    count  = (dd->xe-dd->xs)*(top-bottom)*(up-down);
    ierr   = PetscMalloc1(count,&idx);CHKERRQ(ierr);
    count  = 0;
    for (i=down; i<up; i++) {
      for (j=bottom; j<top; j++) {
        for (k=0; k<dd->xe-dd->xs; k++) {
          idx[count++] = (left+j*(dd->Xe-dd->Xs))+i*(dd->Xe-dd->Xs)*(dd->Ye-dd->Ys) + k;
        }
      }
    }
  } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_CORRUPT,"DMDA has invalid dimension %D",dd->dim);
  ierr = VecScatterRemap(dd->ltol,idx,NULL);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
   DMLocalToLocalBegin_DA - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be followed by DMLocalToLocalEnd_DA().

   Neighbor-wise Collective on DMDA and Vec

   Input Parameters:
+  da - the distributed array context
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the local vector with correct ghost values

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

*/
PetscErrorCode DMLocalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
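  /* Build the local-to-local scatter lazily on first use */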
  if (!dd->ltol) {
    ierr = DMLocalToLocalCreate_DA(da);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(dd->ltol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
   DMLocalToLocalEnd_DA - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be preceded by
   DMLocalToLocalBegin_DA().

   Neighbor-wise Collective on DMDA and Vec

   Input Parameters:
+  da - the distributed array context
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the local vector with correct ghost values

   Note:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

*/
PetscErrorCode DMLocalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = VecScatterEnd(dd->ltol,g,l,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
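
/*
   A minimal usage sketch (not part of this file; the vector names lfrom/lto
   are hypothetical and the DMDACreate2d arguments assume the PETSc 3.5-era
   API). Callers normally reach these routines through the generic
   DMLocalToLocalBegin()/DMLocalToLocalEnd() interface; as noted above, the
   second local vector may be obtained with VecDuplicate().

     DM  da;
     Vec lfrom,lto;

     DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,
                  DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
     DMCreateLocalVector(da,&lfrom);
     VecDuplicate(lfrom,&lto);
     ... fill the owned (non-ghost) part of lfrom ...
     DMLocalToLocalBegin(da,lfrom,INSERT_VALUES,lto);
     DMLocalToLocalEnd(da,lfrom,INSERT_VALUES,lto);
     VecDestroy(&lfrom);
     VecDestroy(&lto);
     DMDestroy(&da);
*/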