Actual source code: daindex.c

petsc-3.3-p7 2013-05-11
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/

/*@C
   DMDAGetGlobalIndices - Returns the global node number of all local nodes,
   including ghost nodes.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  n - the number of local elements, including ghost nodes (or PETSC_NULL)
-  idx - the global indices

   Level: intermediate

   Note:
   For DMDA_STENCIL_STAR stencils the inactive corner ghost nodes are also included
   in the list of local indices (even though those nodes are not updated
   during calls to DMDAXXXToXXX()).

   Essentially the same data is returned in the form of a local-to-global mapping
   by the routine DMDAGetISLocalToGlobalMapping().

   Fortran Note:
   This routine is used differently in Fortran:
.vb
        DM          da
        integer     n,da_array(1)
        PetscOffset i_da
        integer     ierr
        call DMDAGetGlobalIndices(da,n,da_array,i_da,ierr)

   C Access first local entry in list
        value = da_array(i_da + 1)
.ve

   See the <A href="../../docs/manual.pdf#nameddest=Chapter 9 PETSc for Fortran Users">Fortran chapter</A> of the users manual for details.

.keywords: distributed array, get, global, indices, local-to-global

.seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobalBegin(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDALocalToLocalBegin(), DMDAGetAO(), DMDAGetGlobalIndicesF90(),
          DMDAGetISLocalToGlobalMapping(), DMDACreate3d(), DMDACreate1d(), DMDALocalToLocalEnd(), DMDAGetOwnershipRanges()
@*/
PetscErrorCode  DMDAGetGlobalIndices(DM da,PetscInt *n,PetscInt **idx)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (n)   *n   = dd->Nl;
  if (idx) *idx = dd->idx;
  PetscFunctionReturn(0);
}
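/*
   Usage sketch (not part of the original file): a minimal example of calling
   DMDAGetGlobalIndices(). The function name ExampleUseGlobalIndices is
   illustrative only, and "da" is assumed to be a fully set-up DMDA.
*/
static PetscErrorCode ExampleUseGlobalIndices(DM da)
{
  PetscErrorCode ierr;
  PetscInt       n,i,*idx;

  PetscFunctionBegin;
  /* idx points at storage owned by the DMDA; the caller must not free it */
  ierr = DMDAGetGlobalIndices(da,&n,&idx);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    /* idx[i] is the global (PETSc-ordering) number of local node i, ghost nodes included */
    ierr = PetscPrintf(PETSC_COMM_SELF,"local %D -> global %D\n",i,idx[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}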

/*
   Gets the natural number for each global number on the process.

   Used by DMDAGetAO() and DMDAGlobalToNatural_Create()
*/
PetscErrorCode DMDAGetNatural_Private(DM da,PetscInt *outNlocal,IS *isnatural)
{
  PetscErrorCode ierr;
  PetscInt       Nlocal,i,j,k,*lidx,lict = 0;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  Nlocal = (dd->xe-dd->xs);
  if (dd->dim > 1) {
    Nlocal *= (dd->ye-dd->ys);
  }
  if (dd->dim > 2) {
    Nlocal *= (dd->ze-dd->zs);
  }

  ierr = PetscMalloc(Nlocal*sizeof(PetscInt),&lidx);CHKERRQ(ierr);

  if (dd->dim == 1) {
    for (i=dd->xs; i<dd->xe; i++) {
      /* global number in natural ordering */
      lidx[lict++] = i;
    }
  } else if (dd->dim == 2) {
    for (j=dd->ys; j<dd->ye; j++) {
      for (i=dd->xs; i<dd->xe; i++) {
        /* global number in natural ordering */
        lidx[lict++] = i + j*dd->M*dd->w;
      }
    }
  } else if (dd->dim == 3) {
    for (k=dd->zs; k<dd->ze; k++) {
      for (j=dd->ys; j<dd->ye; j++) {
        for (i=dd->xs; i<dd->xe; i++) {
          /* global number in natural ordering */
          lidx[lict++] = i + j*dd->M*dd->w + k*dd->M*dd->N*dd->w;
        }
      }
    }
  }
  *outNlocal = Nlocal;
  ierr = ISCreateGeneral(((PetscObject)da)->comm,Nlocal,lidx,PETSC_OWN_POINTER,isnatural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
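/*
   Illustration (not part of the original file): the loops above implement the
   natural, lexicographic ordering: x fastest, then y, then z, with the w
   degrees of freedom of each node numbered consecutively. The helper below is
   hypothetical and only restates that formula for component c of grid node
   (i,j,k) on an M x N x P grid with w degrees of freedom per node.
*/
static PetscInt ExampleNaturalIndex(PetscInt i,PetscInt j,PetscInt k,PetscInt c,PetscInt M,PetscInt N,PetscInt w)
{
  /* component varies fastest within a node; nodes are ordered x, then y, then z */
  return c + i*w + j*M*w + k*M*N*w;
}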

/*@
   DMDAGetAO - Gets the application ordering context for a distributed array.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  ao - the application ordering context for DMDAs

   Level: intermediate

   Notes:
   In this case, the AO maps to the natural grid ordering that would be used
   for the DMDA if only 1 processor were employed (ordering most rapidly in the
   x-direction, then y, then z). Multiple degrees of freedom are numbered
   for each node (rather than 1 component for the whole grid, then the next
   component, etc.).

.keywords: distributed array, get, global, indices, local-to-global

.seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobalBegin(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetGlobalIndices(), DMDAGetOwnershipRanges(),
          AO, AOPetscToApplication(), AOApplicationToPetsc()
@*/
PetscErrorCode  DMDAGetAO(DM da,AO *ao)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(ao,2);

  /*
     Build the natural ordering to PETSc ordering mappings.
  */
  if (!dd->ao) {
    IS             ispetsc,isnatural;
    PetscErrorCode ierr;
    PetscInt       Nlocal;

    ierr = DMDAGetNatural_Private(da,&Nlocal,&isnatural);CHKERRQ(ierr);
    ierr = ISCreateStride(((PetscObject)da)->comm,Nlocal,dd->base,1,&ispetsc);CHKERRQ(ierr);
    ierr = AOCreateBasicIS(isnatural,ispetsc,&dd->ao);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(da,dd->ao);CHKERRQ(ierr);
    ierr = ISDestroy(&ispetsc);CHKERRQ(ierr);
    ierr = ISDestroy(&isnatural);CHKERRQ(ierr);
  }
  *ao = dd->ao;
  PetscFunctionReturn(0);
}
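/*
   Usage sketch (not part of the original file): mapping indices from the
   natural ordering to the PETSc ordering through the DMDA's AO. The function
   name ExampleNaturalToPetsc is illustrative; AOApplicationToPetsc() is the
   standard AO routine for this direction of the mapping.
*/
static PetscErrorCode ExampleNaturalToPetsc(DM da,PetscInt n,PetscInt indices[])
{
  PetscErrorCode ierr;
  AO             ao;

  PetscFunctionBegin;
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);                  /* ao is owned by the DMDA; do not destroy it */
  ierr = AOApplicationToPetsc(ao,n,indices);CHKERRQ(ierr); /* overwrites indices[] in place */
  PetscFunctionReturn(0);
}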

/*MC
    DMDAGetGlobalIndicesF90 - Returns a Fortran90 pointer to the list of
    global indices (global node number of all local nodes, including
    ghost nodes).

    Synopsis:
    DMDAGetGlobalIndicesF90(DM da,integer n,{integer, pointer :: idx(:)},integer ierr)

    Not Collective

    Input Parameter:
.   da - the distributed array

    Output Parameters:
+   n - the number of local elements, including ghost nodes (or PETSC_NULL)
.   idx - the Fortran90 pointer to the global indices
-   ierr - error code

    Level: intermediate

    Notes:
    Not yet supported for all F90 compilers.

.keywords: distributed array, get, global, indices, local-to-global, f90

.seealso: DMDAGetGlobalIndices()
M*/