/*
   Code for manipulating distributed regular arrays in parallel.
*/
#include <petsc/private/dmdaimpl.h>

/*
   DMLocalToLocalCreate_DA - Creates the local-to-local scatter

   Collective on DMDA

   Input Parameter:
.  da - the distributed array
*/
17: PetscErrorCode DMLocalToLocalCreate_DA(DM da) 18: {
20: PetscInt *idx,left,j,count,up,down,i,bottom,top,k,dim=da->dim;
21: DM_DA *dd = (DM_DA*)da->data;
26: if (dd->ltol) return(0);
27: /*
28: We simply remap the values in the from part of
29: global to local to read from an array with the ghost values
30: rather then from the plain array.
31: */
32: VecScatterCopy(dd->gtol,&dd->ltol);
33: PetscLogObjectParent((PetscObject)da,(PetscObject)dd->ltol);
34: if (dim == 1) {
35: left = dd->xs - dd->Xs;
36: PetscMalloc1(dd->xe-dd->xs,&idx);
37: for (j=0; j<dd->xe-dd->xs; j++) idx[j] = left + j;
38: } else if (dim == 2) {
39: left = dd->xs - dd->Xs; down = dd->ys - dd->Ys; up = down + dd->ye-dd->ys;
40: PetscMalloc1((dd->xe-dd->xs)*(up - down),&idx);
41: count = 0;
42: for (i=down; i<up; i++) {
43: for (j=0; j<dd->xe-dd->xs; j++) {
44: idx[count++] = left + i*(dd->Xe-dd->Xs) + j;
45: }
46: }
47: } else if (dim == 3) {
48: left = dd->xs - dd->Xs;
49: bottom = dd->ys - dd->Ys; top = bottom + dd->ye-dd->ys;
50: down = dd->zs - dd->Zs; up = down + dd->ze-dd->zs;
51: count = (dd->xe-dd->xs)*(top-bottom)*(up-down);
52: PetscMalloc1(count,&idx);
53: count = 0;
54: for (i=down; i<up; i++) {
55: for (j=bottom; j<top; j++) {
56: for (k=0; k<dd->xe-dd->xs; k++) {
57: idx[count++] = (left+j*(dd->Xe-dd->Xs))+i*(dd->Xe-dd->Xs)*(dd->Ye-dd->Ys) + k;
58: }
59: }
60: }
61: } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_CORRUPT,"DMDA has invalid dimension %D",dim);
63: VecScatterRemap(dd->ltol,idx,NULL);
64: PetscFree(idx);
65: return(0);
66: }
/*
   DMLocalToLocalBegin_DA - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be followed by DMLocalToLocalEnd_DA().

   Neighbor-wise Collective on DMDA and Vec

   Input Parameters:
+  da   - the distributed array context
.  g    - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the local vector with correct ghost values

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.
*/
90: PetscErrorCode DMLocalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l) 91: {
93: DM_DA *dd = (DM_DA*)da->data;
97: if (!dd->ltol) {
98: DMLocalToLocalCreate_DA(da);
99: }
100: VecScatterBegin(dd->ltol,g,l,mode,SCATTER_FORWARD);
101: return(0);
102: }
/*
   DMLocalToLocalEnd_DA - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be preceded by
   DMLocalToLocalBegin_DA().

   Neighbor-wise Collective on DMDA and Vec

   Input Parameters:
+  da   - the distributed array context
.  g    - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l - the local vector with correct ghost values

   Note:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.
*/
127: PetscErrorCode DMLocalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)128: {
130: DM_DA *dd = (DM_DA*)da->data;
136: VecScatterEnd(dd->ltol,g,l,mode,SCATTER_FORWARD);
137: return(0);
138: }