Actual source code: daindex.c
petsc-3.4.5 2014-06-29
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
/*@C
   DMDAGetGlobalIndices - Returns the global node number of all local nodes,
   including ghost nodes.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  n - the number of local elements, including ghost nodes (or NULL)
-  idx - the global indices

   Level: intermediate
   Notes:
   For DMDA_STENCIL_STAR stencils the inactive corner ghost nodes are also included
   in the list of local indices (even though those nodes are not updated
   during calls to DMDAXXXToXXX()).

   Essentially the same data is returned in the form of a local-to-global mapping
   by the routine DMDAGetISLocalToGlobalMapping().
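   Example Usage:
   A minimal C sketch, assuming da has already been created with one of the
   DMDACreate routines; the variable names are illustrative:
.vb
      PetscInt       nl,*gidx;
      PetscErrorCode ierr;
      ierr = DMDAGetGlobalIndices(da,&nl,&gidx);CHKERRQ(ierr);
      /* gidx[i] is the global number of the ith local node, ghosts included */
.ve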
   Fortran Note:
   This routine is used differently in Fortran:
.vb
     DM          da
     integer     n,da_array(1)
     PetscOffset i_da
     integer     ierr
     call DMDAGetGlobalIndices(da,n,da_array,i_da,ierr)

C    Access first local entry in list
     value = da_array(i_da + 1)
.ve
   See the <A href="../../docs/manual.pdf#nameddest=Chapter 9 PETSc for Fortran Users">Fortran chapter</A> of the users manual for details.

.keywords: distributed array, get, global, indices, local-to-global

.seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobalBegin(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDALocalToLocalBegin(), DMDAGetAO(), DMDAGetGlobalIndicesF90(),
          DMDAGetISLocalToGlobalMapping(), DMDACreate3d(), DMDACreate1d(), DMDALocalToLocalEnd(), DMDAGetOwnershipRanges()
@*/
PetscErrorCode  DMDAGetGlobalIndices(DM da,PetscInt *n,PetscInt **idx)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (n)   *n   = dd->Nl;
  if (idx) *idx = dd->idx;
  PetscFunctionReturn(0);
}
/*
  Gets the natural-ordering global number for each PETSc-ordering global
  number owned by this process.

  Used by DMDAGetAO() and DMDAGlobalToNatural_Create()
*/
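/*
  Worked example of the numbering below: for a 2-D DMDA with global size M=4,
  N=3 and w=1 degree of freedom per node, the owned node (i=2,j=1) receives
  natural index i + j*M*w = 2 + 1*4*1 = 6, i.e. lexicographic ordering,
  numbering fastest in x, then y, then z.
*/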
PetscErrorCode DMDAGetNatural_Private(DM da,PetscInt *outNlocal,IS *isnatural)
{
  PetscErrorCode ierr;
  PetscInt       Nlocal,i,j,k,*lidx,lict = 0;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  Nlocal = (dd->xe-dd->xs);
  if (dd->dim > 1) Nlocal *= (dd->ye-dd->ys);
  if (dd->dim > 2) Nlocal *= (dd->ze-dd->zs);

  ierr = PetscMalloc(Nlocal*sizeof(PetscInt),&lidx);CHKERRQ(ierr);

  if (dd->dim == 1) {
    for (i=dd->xs; i<dd->xe; i++) {
      /* global number in natural ordering */
      lidx[lict++] = i;
    }
  } else if (dd->dim == 2) {
    for (j=dd->ys; j<dd->ye; j++) {
      for (i=dd->xs; i<dd->xe; i++) {
        /* global number in natural ordering */
        lidx[lict++] = i + j*dd->M*dd->w;
      }
    }
  } else if (dd->dim == 3) {
    for (k=dd->zs; k<dd->ze; k++) {
      for (j=dd->ys; j<dd->ye; j++) {
        for (i=dd->xs; i<dd->xe; i++) {
          /* global number in natural ordering */
          lidx[lict++] = i + j*dd->M*dd->w + k*dd->M*dd->N*dd->w;
        }
      }
    }
  }
  *outNlocal = Nlocal;
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)da),Nlocal,lidx,PETSC_OWN_POINTER,isnatural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   DMDAGetAO - Gets the application ordering context for a distributed array.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  ao - the application ordering context for DMDAs

   Level: intermediate
   Notes:
   In this case, the AO maps to the natural grid ordering that would be used
   for the DMDA if only 1 processor were employed (ordering most rapidly in the
   x-direction, then y, then z). Multiple degrees of freedom are numbered
   for each node (rather than 1 component for the whole grid, then the next
   component, etc.)
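   Example Usage:
   A minimal C sketch, assuming da already exists; the index values are
   purely illustrative:
.vb
      AO             ao;
      PetscInt       nodes[] = {0,5};       /* natural-ordering indices */
      PetscErrorCode ierr;
      ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
      ierr = AOApplicationToPetsc(ao,2,nodes);CHKERRQ(ierr); /* now PETSc-ordering indices */
.ve
   The AO is owned by the DMDA, so it should not be destroyed by the caller.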
.keywords: distributed array, get, global, indices, local-to-global

.seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobalBegin(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetGlobalIndices(), DMDAGetOwnershipRanges(),
          AO, AOPetscToApplication(), AOApplicationToPetsc()
@*/
PetscErrorCode DMDAGetAO(DM da,AO *ao)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /*
     Build the natural ordering to PETSc ordering mappings.
  */
  if (!dd->ao) {
    IS             ispetsc,isnatural;
    PetscErrorCode ierr;
    PetscInt       Nlocal;

    ierr = DMDAGetNatural_Private(da,&Nlocal,&isnatural);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)da),Nlocal,dd->base,1,&ispetsc);CHKERRQ(ierr);
    ierr = AOCreateBasicIS(isnatural,ispetsc,&dd->ao);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(da,dd->ao);CHKERRQ(ierr);
    ierr = ISDestroy(&ispetsc);CHKERRQ(ierr);
    ierr = ISDestroy(&isnatural);CHKERRQ(ierr);
  }
  *ao = dd->ao;
  PetscFunctionReturn(0);
}
/*MC
   DMDAGetGlobalIndicesF90 - Returns a Fortran90 pointer to the list of
   global indices (global node number of all local nodes, including
   ghost nodes).

   Synopsis:
   DMDAGetGlobalIndicesF90(DM da,integer n,{integer, pointer :: idx(:)},integer ierr)

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  n - the number of local elements, including ghost nodes (or NULL)
.  idx - the Fortran90 pointer to the global indices
-  ierr - error code

   Level: intermediate

   Notes:
   Not yet supported for all F90 compilers

.keywords: distributed array, get, global, indices, local-to-global, f90

.seealso: DMDAGetGlobalIndices()
M*/