Actual source code: da1.c

petsc-3.4.5 2014-06-29
  2: /*
  3:    Code for manipulating distributed regular 1d arrays in parallel.
  4:    This file was created by Peter Mell   6/30/95
  5: */

  7: #include <petsc-private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/

  9: const char *const DMDABoundaryTypes[] = {"NONE","GHOSTED","PERIODIC","DMDA_BOUNDARY_",0};

 11: #include <petscdraw.h>
 14: PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
 15: {
 17:   PetscMPIInt    rank;
 18:   PetscBool      iascii,isdraw,isbinary;
 19:   DM_DA          *dd = (DM_DA*)da->data;
 20: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 21:   PetscBool ismatlab;
 22: #endif

 25:   MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);

 27:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 28:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
 29:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
 30: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 31:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
 32: #endif
 33:   if (iascii) {
 34:     PetscViewerFormat format;

 36:     PetscViewerGetFormat(viewer, &format);
 37:     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
 38:       DMDALocalInfo info;
 39:       DMDAGetLocalInfo(da,&info);
 40:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 41:       PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);
 42:       PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
 43:       PetscViewerFlush(viewer);
 44:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 45:     } else {
 46:       DMView_DA_VTK(da, viewer);
 47:     }
 48:   } else if (isdraw) {
 49:     PetscDraw draw;
 50:     double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
 51:     PetscInt  base;
 52:     char      node[10];
 53:     PetscBool isnull;

 55:     PetscViewerDrawGetDraw(viewer,0,&draw);
 56:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

 58:     PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
 59:     PetscDrawSynchronizedClear(draw);

 61:     /* first processor draws all node lines */
 62:     if (!rank) {
 63:       PetscInt xmin_tmp;
 64:       ymin = 0.0; ymax = 0.3;

 66:       for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
 67:         PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
 68:       }

 70:       xmin = 0.0; xmax = dd->M - 1;
 71:       PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
 72:       PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
 73:     }

 75:     PetscDrawSynchronizedFlush(draw);
 76:     PetscDrawPause(draw);

 78:     /* draw my box */
 79:     ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
 80:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
 81:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
 82:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
 83:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

 85:     /* Put in index numbers */
 86:     base = dd->base / dd->w;
 87:     for (x=xmin; x<=xmax; x++) {
 88:       sprintf(node,"%d",(int)base++);
 89:       PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
 90:     }

 92:     PetscDrawSynchronizedFlush(draw);
 93:     PetscDrawPause(draw);
 94:   } else if (isbinary) {
 95:     DMView_DA_Binary(da,viewer);
 96: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 97:   } else if (ismatlab) {
 98:     DMView_DA_Matlab(da,viewer);
 99: #endif
100:   }
101:   return(0);
102: }
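
For illustration, a minimal calling sketch (not part of da1.c; error checking omitted and the grid size is arbitrary): since DMSetUp_DA_1D below sets da->ops->view = DMView_DA_1d, a plain DMView() on a 1d DMDA lands in the routine above, taking the ASCII branch for PETSC_VIEWER_STDOUT_WORLD and the PetscDraw branch for a draw viewer.

    DM da;
    DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,12,1,1,NULL,&da);
    DMView(da,PETSC_VIEWER_STDOUT_WORLD);  /* ASCII branch: prints each process's ownership range */
    DMDestroy(&da);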


107: PetscErrorCode  DMSetUp_DA_1D(DM da)
108: {
109:   DM_DA            *dd   = (DM_DA*)da->data;
110:   const PetscInt   M     = dd->M;
111:   const PetscInt   dof   = dd->w;
112:   const PetscInt   s     = dd->s;
113:   const PetscInt   sDist = s*dof;  /* absolute stencil distance */
114:   const PetscInt   *lx   = dd->lx;
115:   DMDABoundaryType bx    = dd->bx;
116:   MPI_Comm         comm;
117:   Vec              local, global;
118:   VecScatter       ltog, gtol;
119:   IS               to, from;
120:   PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
121:   PetscMPIInt      rank, size;
122:   PetscInt         i,j,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m,IXs,IXe;
123:   PetscErrorCode   ierr;

126:   PetscObjectGetComm((PetscObject) da, &comm);
127:   MPI_Comm_size(comm,&size);
128:   MPI_Comm_rank(comm,&rank);

130:   dd->m = size;
131:   m     = dd->m;

133:   if (s > 0) {
134:     /* if no data is communicated (s == 0) it is OK for some processes to own nothing, so only check when s > 0 */
135:     if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
136:     if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
137:   }

139:   /*
140:      Determine locally owned region
141:      xs is the first local node number, x is the number of local nodes
142:   */
143:   if (!lx) {
144:     PetscMalloc(m*sizeof(PetscInt), &dd->lx);
145:     PetscOptionsGetBool(NULL,"-da_partition_blockcomm",&flg1,NULL);
146:     PetscOptionsGetBool(NULL,"-da_partition_nodes_at_end",&flg2,NULL);
147:     if (flg1) {      /* Block Comm type Distribution */
148:       xs = rank*M/m;
149:       x  = (rank + 1)*M/m - xs;
150:     } else if (flg2) { /* The leftover nodes are evenly distributed across the last processes */
151:       x = (M + rank)/m;
152:       if (M/m == x) xs = rank*x;
153:       else          xs = rank*(x-1) + (M+rank)%(x*m);
154:     } else { /* The leftover nodes are evenly distributed across the first M % m processes */
155:       /* Regular PETSc Distribution */
156:       x = M/m + ((M % m) > rank);
157:       if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
158:       else                 xs = rank * (PetscInt)(M/m) + rank;
159:     }
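    /*
       Worked example of the regular distribution above (illustration only): with M = 10
       nodes and m = 3 processes, M/m = 3 and M % m = 1, giving
         rank 0: x = 4, xs = 0
         rank 1: x = 3, xs = 4
         rank 2: x = 3, xs = 7
       so the single leftover node goes to the first process.
    */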
160:     MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);
161:     for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
162:     dd->lx[m-1] = M - dd->lx[m-1];
163:   } else {
164:     x  = lx[rank];
165:     xs = 0;
166:     for (i=0; i<rank; i++) xs += lx[i];
167:     /* verify that data user provided is consistent */
168:     left = xs;
169:     for (i=rank; i<size; i++) left += lx[i];
170:     if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
171:   }

173:   /*
174:    check if the scatter requires more than one process neighbor or wraps around
175:    the domain more than once
176:   */
177:   if ((x < s) && ((M > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

179:   /* From now on x,xs,xe,Xs,Xe are the exact location in the array */
180:   x  *= dof;
181:   xs *= dof;
182:   xe  = xs + x;

184:   /* determine ghost region (Xs) and region scattered into (IXs)  */
185:   if (xs-sDist > 0) {
186:     Xs  = xs - sDist;
187:     IXs = xs - sDist;
188:   } else {
189:     if (bx) Xs = xs - sDist;
190:     else Xs = 0;
191:     IXs = 0;
192:   }
193:   if (xe+sDist <= M*dof) {
194:     Xe  = xe + sDist;
195:     IXe = xe + sDist;
196:   } else {
197:     if (bx) Xe = xe + sDist;
198:     else Xe = M*dof;
199:     IXe = M*dof;
200:   }

202:   if (bx == DMDA_BOUNDARY_PERIODIC || bx == DMDA_BOUNDARY_MIRROR) {
203:     Xs  = xs - sDist;
204:     Xe  = xe + sDist;
205:     IXs = xs - sDist;
206:     IXe = xe + sDist;
207:   }
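  /*
     Illustration of the ghost bookkeeping above: with M = 10, dof = 1, s = 2 (so sDist = 2),
     a process owning xs = 4, xe = 7 gets Xs = IXs = 2 and Xe = IXe = 9.  A process owning
     xs = 0 always gets IXs = 0; it gets Xs = 0 for DMDA_BOUNDARY_NONE but Xs = -2 for
     DMDA_BOUNDARY_GHOSTED, because ghosted boundaries allocate ghost slots outside the
     physical domain even though nothing is scattered into them.
  */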

209:   /* allocate the base parallel and sequential vectors */
210:   dd->Nlocal = x;
211:   VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,0,&global);
212:   dd->nlocal = (Xe-Xs);
213:   VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,0,&local);

215:   /* Create Local to Global Vector Scatter Context */
216:   /* local to global inserts non-ghost point region into global */
217:   VecGetOwnershipRange(global,&start,&end);
218:   ISCreateStride(comm,x,start,1,&to);
219:   ISCreateStride(comm,x,xs-Xs,1,&from);
220:   VecScatterCreate(local,from,global,to,&ltog);
221:   PetscLogObjectParent(da,ltog);
222:   ISDestroy(&from);
223:   ISDestroy(&to);
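  /*
     For example (illustration only): a process owning xs = 4, x = 3 with Xs = 2 stores its
     owned values in local vector positions 2,3,4 (offset xs-Xs past the left ghosts), and
     the ltog scatter just created copies exactly those positions into this process's owned
     block [start,end) of the global vector.
  */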

225:   /* Create Global to Local Vector Scatter Context */
226:   /* global to local must retrieve ghost points */
227:   ISCreateStride(comm,(IXe-IXs),IXs-Xs,1,&to);

229:   PetscMalloc((x+2*(sDist))*sizeof(PetscInt),&idx);
230:   PetscLogObjectMemory(da,(x+2*(sDist))*sizeof(PetscInt));

232:   for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

234:   nn = IXs-Xs;
235:   if (bx == DMDA_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
236:     for (i=0; i<sDist; i++) {  /* Left ghost points */
237:       if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
238:       else                 idx[nn++] = M*dof+(xs-sDist+i);
239:     }

241:     for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

243:     for (i=0; i<sDist; i++) { /* Right ghost points */
244:       if ((xe+i)<M*dof) idx [nn++] =  xe+i;
245:       else              idx [nn++] = (xe+i) - M*dof;
246:     }
247:   } else if (bx == DMDA_BOUNDARY_MIRROR) { /* Handle the mirror boundary case */
248:     for (i=0; i<(sDist)/dof; i++) {  /* Left ghost points */
249:       for (j=0; j<dof; j++) {
250:         if ((xs-sDist+i*dof + j)>=0) idx[nn++] = xs-sDist+i*dof +j;
251:         else                         idx[nn++] = sDist - dof*(i) + j;
252:       }
253:     }

255:     for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

257:     for (i=0; i<(sDist)/dof; i++) { /* Right ghost points */
258:       for (j=0; j<dof; j++) {
259:         if ((xe+i)<M*dof) idx[nn++] =  xe+i*dof+j;
260:         else              idx[nn++] = M*dof - dof*(i + 2) + j;
261:       }
262:     }
263:   } else {      /* Now do all cases with no periodicity */
264:     if (0 <= xs-sDist) {
265:       for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
266:     } else {
267:       for (i=0; i<xs; i++) idx[nn++] = i;
268:     }

270:     for (i=0; i<x; i++) idx [nn++] = xs + i;

272:     if ((xe+sDist)<=M*dof) {
273:       for (i=0; i<sDist; i++) idx[nn++]=xe+i;
274:     } else {
275:       for (i=xe; i<(M*dof); i++) idx[nn++]=i;
276:     }
277:   }
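  /*
     Example of the wrap-around above (illustration only): for DMDA_BOUNDARY_PERIODIC with
     M*dof = 10 and sDist = 2, the process owning xs = 0 gets left ghost indices 8,9 and
     the process owning xe = 10 gets right ghost indices 0,1.
  */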

279:   ISCreateGeneral(comm,nn-IXs+Xs,&idx[IXs-Xs],PETSC_COPY_VALUES,&from);
280:   VecScatterCreate(global,from,local,to,&gtol);
281:   PetscLogObjectParent(da,to);
282:   PetscLogObjectParent(da,from);
283:   PetscLogObjectParent(da,gtol);
284:   ISDestroy(&to);
285:   ISDestroy(&from);
286:   VecDestroy(&local);
287:   VecDestroy(&global);

289:   dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
290:   dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

292:   dd->gtol      = gtol;
293:   dd->ltog      = ltog;
294:   dd->base      = xs;
295:   da->ops->view = DMView_DA_1d;

297:   /*
298:      Set the local to global ordering in the global vector; this allows the use
299:      of VecSetValuesLocal().
300:   */
301:   for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

303:   ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_COPY_VALUES,&da->ltogmap);
304:   ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);
305:   PetscLogObjectParent(da,da->ltogmap);

307:   dd->idx = idx;
308:   dd->Nl  = nn;
309:   return(0);
310: }
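
The gtol and ltog scatters assembled above are stored in dd->gtol and dd->ltog and are what DMGlobalToLocalBegin/End() use to fill ghost values for this 1d DMDA. A minimal usage sketch (assumed context: a DM named da already set up as above; error checking omitted):

    Vec gvec,lvec;
    DMCreateGlobalVector(da,&gvec);
    DMCreateLocalVector(da,&lvec);
    /* ... fill gvec ... */
    DMGlobalToLocalBegin(da,gvec,INSERT_VALUES,lvec);  /* starts the ghost-point scatter */
    DMGlobalToLocalEnd(da,gvec,INSERT_VALUES,lvec);    /* lvec now holds owned values plus ghosts */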


315: /*@C
316:    DMDACreate1d - Creates an object that will manage the communication of one-dimensional
317:    regular array data that is distributed across one or more processors.

319:    Collective on MPI_Comm

321:    Input Parameters:
322: +  comm - MPI communicator
323: .  bx - type of ghost cells the array should have at the boundary, if any. Use
324:           DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, or DMDA_BOUNDARY_PERIODIC.
325: .  M - global dimension of the array (use -M to indicate that it may be set to a different value
326:             from the command line with -da_grid_x <M>)
327: .  dof - number of degrees of freedom per node
328: .  s - stencil width
329: -  lx - array containing the number of nodes in the X direction on each processor,
330:         or NULL. If non-null, its length must equal the number of processes in the MPI_Comm.

332:    Output Parameter:
333: .  da - the resulting distributed array object

335:    Options Database Keys:
336: +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
337: .  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
338: .  -da_refine_x <rx> - refinement factor
339: -  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

341:    Level: beginner

343:    Notes:
344:    The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
345:    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
346:    and DMCreateLocalVector(), with calls to VecDuplicate() if more are needed.

348: .keywords: distributed array, create, one-dimensional

350: .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
351:           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetRefinementFactor(),
352:           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

354: @*/
355: PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMDABoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
356: {
358:   PetscMPIInt    size;

361:   DMDACreate(comm, da);
362:   DMDASetDim(*da, 1);
363:   DMDASetSizes(*da, M, 1, 1);
364:   MPI_Comm_size(comm, &size);
365:   DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
366:   DMDASetBoundaryType(*da, bx, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE);
367:   DMDASetDof(*da, dof);
368:   DMDASetStencilWidth(*da, s);
369:   DMDASetOwnershipRanges(*da, lx, NULL, NULL);
370:   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
371:   DMSetFromOptions(*da);
372:   DMSetUp(*da);
373:   DMViewFromOptions(*da,NULL,"-dm_view");
374:   return(0);
375: }
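
A minimal usage sketch of DMDACreate1d() (not part of this file; error checking omitted, and the choices M = 64, dof = 1, s = 1 are arbitrary):

    DM          da;
    Vec         g;
    PetscScalar *a;
    PetscInt    i,xs,xm;

    DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,64,1,1,NULL,&da);
    DMCreateGlobalVector(da,&g);
    DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);   /* this process owns global nodes xs..xs+xm-1 */
    DMDAVecGetArray(da,g,&a);
    for (i=xs; i<xs+xm; i++) a[i] = (PetscScalar)i;   /* index the array with global node numbers */
    DMDAVecRestoreArray(da,g,&a);
    VecDestroy(&g);
    DMDestroy(&da);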