Actual source code: dacorn.c
petsc-3.4.5 2014-06-29
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
PetscErrorCode DMCreateCoordinateDM_DA(DM dm, DM *cdm)
{
  PetscErrorCode ierr;
  DM_DA          *da = (DM_DA*) dm->data;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size);CHKERRQ(ierr);
  if (da->dim == 1) {
    PetscInt         s,m,*lc,l;
    DMDABoundaryType bx;

    ierr = DMDAGetInfo(dm,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr);
    ierr = DMDAGetCorners(dm,0,0,0,&l,0,0);CHKERRQ(ierr);
    ierr = PetscMalloc(size * sizeof(PetscInt), &lc);CHKERRQ(ierr);
    ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    ierr = DMDACreate1d(PetscObjectComm((PetscObject)dm),bx,m,1,s,lc,cdm);CHKERRQ(ierr);
    ierr = PetscFree(lc);CHKERRQ(ierr);
  } else if (da->dim == 2) {
    /* m,n are the global grid sizes; M,N are the dimensions of the process grid */
    PetscInt         i,s,m,*lc,*ld,l,k,n,M,N;
    DMDABoundaryType bx,by;

    ierr = DMDAGetInfo(dm,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr);
    ierr = DMDAGetCorners(dm,0,0,0,&l,&k,0);CHKERRQ(ierr);
    ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr);
    /* only the first M values in lc matter */
    ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    /* every Mth value in ld matters */
    ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    for (i = 0; i < N; ++i) ld[i] = ld[M*i];
    if (bx == DMDA_BOUNDARY_MIRROR || by == DMDA_BOUNDARY_MIRROR) {
      ierr = DMDACreate2d(PetscObjectComm((PetscObject)dm),bx,by,DMDA_STENCIL_STAR,m,n,M,N,2,s,lc,ld,cdm);CHKERRQ(ierr);
    } else {
      ierr = DMDACreate2d(PetscObjectComm((PetscObject)dm),bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,cdm);CHKERRQ(ierr);
    }
    ierr = PetscFree2(lc,ld);CHKERRQ(ierr);
  } else if (da->dim == 3) {
    PetscInt         i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
    DMDABoundaryType bx,by,bz;

    ierr = DMDAGetInfo(dm,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr);
    ierr = DMDAGetCorners(dm,0,0,0,&l,&k,&q);CHKERRQ(ierr);
    ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr);
    /* only the first M values in lc matter */
    ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    /* every Mth value in ld matters */
    ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    for (i = 0; i < N; ++i) ld[i] = ld[M*i];
    /* every (M*N)th value in le matters */
    ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    for (i = 0; i < P; ++i) le[i] = le[M*N*i];
    ierr = DMDACreate3d(PetscObjectComm((PetscObject)dm),bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,cdm);CHKERRQ(ierr);
    ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
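
/*
   Usage sketch (illustrative; not part of the original file): callers reach
   DMCreateCoordinateDM_DA() indirectly through DMGetCoordinateDM(), which
   returns the dof = dim coordinate DMDA with the same parallel layout as the
   given DMDA. Assumes a 2-D DMDA; the function name is hypothetical.
*/
static PetscErrorCode ExampleCoordinateDM(DM da)
{
  PetscErrorCode ierr;
  DM             cda;
  Vec            coords;
  DMDACoor2d     **xy;
  PetscInt       xs,ys;

  PetscFunctionBegin;
  ierr = DMDASetUniformCoordinates(da,-1.0,1.0,-1.0,1.0,0.0,0.0);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);   /* borrowed reference; do not destroy */
  ierr = DMGetCoordinates(da,&coords);CHKERRQ(ierr); /* global vector, dof = dim */
  ierr = DMDAVecGetArray(cda,coords,&xy);CHKERRQ(ierr);
  ierr = DMDAGetCorners(cda,&xs,&ys,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"first owned node at (%g,%g)\n",
                                 (double)PetscRealPart(xy[ys][xs].x),(double)PetscRealPart(xy[ys][xs].y));CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(cda,coords,&xy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}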
/*@C
   DMDASetFieldName - Sets the name of an individual field component in the
   multicomponent vectors associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
.  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DMDA
-  name - the name of the field (component)

   Level: intermediate

.keywords: distributed array, set, component name

.seealso: DMDAGetFieldName(), DMDASetCoordinateName(), DMDAGetCoordinateName()
@*/
PetscErrorCode DMDASetFieldName(DM da,PetscInt nf,const char name[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr);
  ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   DMDAGetFieldName - Gets the name of an individual field component in the
   multicomponent vectors associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
-  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DMDA

   Output Parameter:
.  name - the name of the field (component)

   Level: intermediate

.keywords: distributed array, get, component name

.seealso: DMDASetFieldName(), DMDASetCoordinateName(), DMDAGetCoordinateName()
@*/
PetscErrorCode DMDAGetFieldName(DM da,PetscInt nf,const char **name)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  *name = dd->fieldname[nf];
  PetscFunctionReturn(0);
}
/*@C
   DMDASetCoordinateName - Sets the name of a coordinate direction associated with a DMDA, for example "x" or "y"

   Not Collective

   Input Parameters:
+  da - the distributed array
.  nf - coordinate number for the DMDA (0, 1, ... dim-1)
-  name - the name of the coordinate

   Level: intermediate

.keywords: distributed array, set, coordinate name

.seealso: DMDAGetCoordinateName(), DMDASetFieldName(), DMDAGetFieldName()
@*/
PetscErrorCode DMDASetCoordinateName(DM da,PetscInt nf,const char name[])
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (nf < 0 || nf >= dd->dim) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid coordinate number: %D",nf);
  ierr = PetscFree(dd->coordinatename[nf]);CHKERRQ(ierr);
  ierr = PetscStrallocpy(name,&dd->coordinatename[nf]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   DMDAGetCoordinateName - Gets the name of a coordinate direction associated with a DMDA.

   Not Collective

   Input Parameters:
+  da - the distributed array
-  nf - coordinate number for the DMDA (0, 1, ... dim-1)

   Output Parameter:
.  name - the name of the coordinate direction

   Level: intermediate

.keywords: distributed array, get, coordinate name

.seealso: DMDASetCoordinateName(), DMDASetFieldName(), DMDAGetFieldName()
@*/
PetscErrorCode DMDAGetCoordinateName(DM da,PetscInt nf,const char **name)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  if (nf < 0 || nf >= dd->dim) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid coordinate number: %D",nf);
  *name = dd->coordinatename[nf];
  PetscFunctionReturn(0);
}
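
/*
   Usage sketch (illustrative; not part of the original file): attaching
   human-readable names to the components of a dof = 2 DMDA and to its
   coordinate directions, so that viewers can label the fields. The function
   name and field names are hypothetical.
*/
static PetscErrorCode ExampleNameFields(DM da)
{
  PetscErrorCode ierr;
  const char     *fname;

  PetscFunctionBegin;
  ierr = DMDASetFieldName(da,0,"pressure");CHKERRQ(ierr);
  ierr = DMDASetFieldName(da,1,"temperature");CHKERRQ(ierr);
  ierr = DMDASetCoordinateName(da,0,"x");CHKERRQ(ierr);
  ierr = DMDASetCoordinateName(da,1,"y");CHKERRQ(ierr);
  ierr = DMDAGetFieldName(da,0,&fname);CHKERRQ(ierr); /* borrowed pointer; do not free */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"field 0 is named %s\n",fname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}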
/*@
   DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
   corner of the local region, excluding ghost points.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  x,y,z - the corner indices (where y and z are optional; these are used
           for 2D and 3D problems)
-  m,n,p - widths in the corresponding directions (where n and p are optional;
           these are used for 2D and 3D problems)

   Note:
   The corner information is independent of the number of degrees of
   freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and
   m, n, p can be thought of as coordinates on a logical grid, where each
   grid point has (potentially) several degrees of freedom.
   Any of y, z, n, and p can be passed in as NULL if not needed.

   Level: beginner

.keywords: distributed array, get, corners, nodes, local indices

.seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges()
@*/
PetscErrorCode DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
{
  PetscInt w;
  DM_DA    *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /* since xs, xe, ... have all been multiplied by the number of degrees
     of freedom per cell, w = dd->w, we divide that out before returning */
  w = dd->w;
  if (x) *x = dd->xs/w + dd->xo; if (m) *m = (dd->xe - dd->xs)/w;
  /* the y and z values have NOT been multiplied by w */
  if (y) *y = dd->ys + dd->yo; if (n) *n = (dd->ye - dd->ys);
  if (z) *z = dd->zs + dd->zo; if (p) *p = (dd->ze - dd->zs);
  PetscFunctionReturn(0);
}
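
/*
   Usage sketch (illustrative; not part of the original file): the standard
   pattern for filling the locally owned block of a global vector on a 2-D,
   dof = 1 DMDA using the corner indices returned above. The function name
   is hypothetical.
*/
static PetscErrorCode ExampleFillLocalBlock(DM da,Vec g)
{
  PetscErrorCode ierr;
  PetscInt       xs,ys,xm,ym,i,j;
  PetscScalar    **u;

  PetscFunctionBegin;
  ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,g,&u);CHKERRQ(ierr);
  for (j=ys; j<ys+ym; j++) {                  /* global indices, no ghost points */
    for (i=xs; i<xs+xm; i++) u[j][i] = (PetscScalar)(i + j);
  }
  ierr = DMDAVecRestoreArray(da,g,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}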
/*@
   DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  lmin - local minimum coordinates (length dim, optional)
-  lmax - local maximum coordinates (length dim, optional)

   Level: beginner

.keywords: distributed array, get, coordinates

.seealso: DMDAGetCoordinateDA(), DMGetCoordinates(), DMDAGetBoundingBox()
@*/
PetscErrorCode DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[])
{
  PetscErrorCode    ierr;
  Vec               coords = NULL;
  PetscInt          dim,i,j;
  const PetscScalar *local_coords;
  PetscReal         min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL};
  PetscInt          N,Ni;
  DM_DA             *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  dim  = dd->dim;
  ierr = DMGetCoordinates(da,&coords);CHKERRQ(ierr);
  if (coords) {
    ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr);
    ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr);
    Ni   = N/dim;   /* number of local nodes; coordinates are interlaced with dof = dim */
    for (i=0; i<Ni; i++) {
      for (j=0; j<3; j++) {
        min[j] = j < dim ? PetscMin(min[j],PetscRealPart(local_coords[i*dim+j])) : 0;
        max[j] = j < dim ? PetscMax(max[j],PetscRealPart(local_coords[i*dim+j])) : 0;
      }
    }
    ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr);
  } else {   /* no coordinates are set; just use the grid indices */
    DMDALocalInfo info;
    ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
    min[0] = info.xs;
    min[1] = info.ys;
    min[2] = info.zs;
    max[0] = info.xs + info.xm-1;
    max[1] = info.ys + info.ym-1;
    max[2] = info.zs + info.zm-1;
  }
  if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);}
  if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
/*@
   DMDAGetBoundingBox - Returns the global bounding box for the DMDA.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  gmin - global minimum coordinates (length dim, optional)
-  gmax - global maximum coordinates (length dim, optional)

   Level: beginner

.keywords: distributed array, get, coordinates

.seealso: DMDAGetCoordinateDA(), DMGetCoordinates(), DMDAGetLocalBoundingBox()
@*/
PetscErrorCode DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
{
  PetscErrorCode ierr;
  PetscMPIInt    count;
  PetscReal      lmin[3],lmax[3];
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = PetscMPIIntCast(dd->dim,&count);CHKERRQ(ierr);
  ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
  if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);}
  if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
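
/*
   Usage sketch (illustrative; not part of the original file): comparing the
   per-process and global extents of a DMDA's coordinates; only the x
   direction is printed. The function name is hypothetical.
*/
static PetscErrorCode ExampleReportExtents(DM da)
{
  PetscErrorCode ierr;
  PetscReal      lmin[3],lmax[3],gmin[3],gmax[3];

  PetscFunctionBegin;
  ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr); /* collective */
  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"local x: [%g,%g] of global [%g,%g]\n",
                                 (double)lmin[0],(double)lmax[0],(double)gmin[0],(double)gmax[0]);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}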
/*@
   DMDAGetReducedDMDA - Gets a DMDA with the same layout but with fewer or more fields

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  nfields - number of fields in the new DMDA

   Output Parameter:
.  nda - the new DMDA

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DMDAGetGhostCorners(), DMSetCoordinates(), DMDASetUniformCoordinates(), DMGetCoordinates(), DMDAGetGhostedCoordinates()
@*/
PetscErrorCode DMDAGetReducedDMDA(DM da,PetscInt nfields,DM *nda)
{
  PetscErrorCode   ierr;
  DM_DA            *dd = (DM_DA*)da->data;
  PetscInt         s,m,n,p,M,N,P,dim,Mo,No,Po;
  const PetscInt   *lx,*ly,*lz;
  DMDABoundaryType bx,by,bz;
  DMDAStencilType  stencil_type;
  PetscInt         ox,oy,oz;
  PetscInt         cl,rl;

  PetscFunctionBegin;
  dim = dd->dim;
  M   = dd->M;
  N   = dd->N;
  P   = dd->P;
  m   = dd->m;
  n   = dd->n;
  p   = dd->p;
  s   = dd->s;
  bx  = dd->bx;
  by  = dd->by;
  bz  = dd->bz;

  stencil_type = dd->stencil_type;

  ierr = DMDAGetOwnershipRanges(da,&lx,&ly,&lz);CHKERRQ(ierr);
  if (dim == 1) {
    ierr = DMDACreate1d(PetscObjectComm((PetscObject)da),bx,M,nfields,s,lx,nda);CHKERRQ(ierr);
  } else if (dim == 2) {
    ierr = DMDACreate2d(PetscObjectComm((PetscObject)da),bx,by,stencil_type,M,N,m,n,nfields,s,lx,ly,nda);CHKERRQ(ierr);
  } else if (dim == 3) {
    ierr = DMDACreate3d(PetscObjectComm((PetscObject)da),bx,by,bz,stencil_type,M,N,P,m,n,p,nfields,s,lx,ly,lz,nda);CHKERRQ(ierr);
  }
  if (da->coordinates) {
    ierr = PetscObjectReference((PetscObject)da->coordinates);CHKERRQ(ierr);

    (*nda)->coordinates = da->coordinates;
  }

  /* allow for getting a reduced DA corresponding to a domain decomposition */
  ierr = DMDAGetOffset(da,&ox,&oy,&oz,&Mo,&No,&Po);CHKERRQ(ierr);
  ierr = DMDASetOffset(*nda,ox,oy,oz,Mo,No,Po);CHKERRQ(ierr);

  /* allow for getting a reduced DA corresponding to a coarsened DA */
  ierr = DMGetCoarsenLevel(da,&cl);CHKERRQ(ierr);
  ierr = DMGetRefineLevel(da,&rl);CHKERRQ(ierr);

  (*nda)->levelup   = rl;
  (*nda)->leveldown = cl;
  PetscFunctionReturn(0);
}
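
/*
   Usage sketch (illustrative; not part of the original file): pulling a
   single-component DMDA out of a multicomponent one, e.g. to view or solve
   for one field with the same parallel layout. The reduced DM is owned by
   the caller and must be destroyed. The function name is hypothetical.
*/
static PetscErrorCode ExampleSingleFieldDM(DM da)
{
  PetscErrorCode ierr;
  DM             da1;
  Vec            v;

  PetscFunctionBegin;
  ierr = DMDAGetReducedDMDA(da,1,&da1);CHKERRQ(ierr); /* same layout, dof = 1 */
  ierr = DMCreateGlobalVector(da1,&v);CHKERRQ(ierr);
  /* ... scatter one component of the full solution into v, view it, etc. ... */
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  ierr = DMDestroy(&da1);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}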