Actual source code: da1.c
/*
  Code for manipulating distributed regular 1d arrays in parallel.
  This file was created by Peter Mell 6/30/95
*/

#include <petsc/private/dmdaimpl.h>

#include <petscdraw.h>

static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   iascii,isdraw,isglvis,isbinary;
  DM_DA       *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscViewerGetFormat(viewer, &format);
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      PetscInt      i,nmax = 0,nmin = PETSC_MAX_INT,navg = 0,*nz,nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);
      DMDAGetLocalInfo(da,&info);
      nzlocal = info.xm;
      PetscMalloc1(size,&nz);
      MPI_Allgather(&nzlocal,1,MPIU_INT,nz,1,MPIU_INT,PetscObjectComm((PetscObject)da));
      for (i=0; i<(PetscInt)size; i++) {
        nmax = PetscMax(nmax,nz[i]);
        nmin = PetscMin(nmin,nz[i]);
        navg += nz[i];
      }
      PetscFree(nz);
      navg = navg/size;
      PetscViewerASCIIPrintf(viewer," Load Balance - Grid Points: Min %D avg %D max %D\n",nmin,navg,nmax);
      return 0;
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      DMDAGetLocalInfo(da,&info);
      PetscViewerASCIIPushSynchronized(viewer);
      PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);
      PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
      PetscViewerFlush(viewer);
      PetscViewerASCIIPopSynchronized(viewer);
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      DMView_DA_GLVis(da,viewer);
    } else {
      DMView_DA_VTK(da, viewer);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawIsNull(draw,&isnull);
    if (isnull) return 0;

    PetscDrawCheckResizedWindow(draw);
    PetscDrawClear(draw);
    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
      }
      xmin = 0.0; xmax = dd->M - 1;
      PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
      PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
    }
    PetscDrawCollectiveEnd(draw);
    PetscDrawFlush(draw);
    PetscDrawPause(draw);

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      PetscSNPrintf(node,sizeof(node),"%d",(int)base++);
      PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
    }
    PetscDrawCollectiveEnd(draw);
    PetscDrawFlush(draw);
    PetscDrawPause(draw);
    PetscDrawSave(draw);
  } else if (isglvis) {
    DMView_DA_GLVis(da,viewer);
  } else if (isbinary) {
    DMView_DA_Binary(da,viewer);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    DMView_DA_Matlab(da,viewer);
#endif
  }
  return 0;
}
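
/*
  Usage note (an added comment, not part of the original source): DMView_DA_1d() is installed
  as da->ops->view by DMSetUp_DA_1D() below, so it runs whenever DMView() is called on a 1d
  DMDA. For example, the load-balance branch above can be exercised with

     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_LOAD_BALANCE);
     DMView(da,PETSC_VIEWER_STDOUT_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);

  which prints the minimum, average, and maximum number of locally owned grid points.
*/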
PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA          *dd    = (DM_DA*)da->data;
  const PetscInt M      = dd->M;
  const PetscInt dof    = dd->w;
  const PetscInt s      = dd->s;
  const PetscInt sDist  = s;  /* stencil distance in points */
  const PetscInt *lx    = dd->lx;
  DMBoundaryType bx     = dd->bx;
  MPI_Comm       comm;
  Vec            local, global;
  VecScatter     gtol;
  IS             to, from;
  PetscBool      flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt    rank, size;
  PetscInt       i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;

  PetscObjectGetComm((PetscObject) da, &comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscMalloc1(m, &dd->lx);
    PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL);
    PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL);
    if (flg1) {        /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else {           /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
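    /*
       Worked example (an added comment, not in the original source): with the default
       distribution, M = 11 points on m = 3 processes gives

          rank 0: x = 4, xs = 0     rank 1: x = 4, xs = 4     rank 2: x = 3, xs = 8

       i.e. the first M % m ranks each carry one extra point.
    */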
    MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
  }

  /*
     check if the scatter requires more than one process neighbor or wraps around
     the domain more than once
  */
  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else    Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else    Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
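  /*
     Illustrative comment (added, not in the original source): for M = 10, s = sDist = 2 and
     the rank owning xs = 0, xe = 5, the code above yields
        DM_BOUNDARY_NONE:      Xs = 0,  IXs = 0,  Xe = IXe = 7   (no ghosts beyond the physical boundary)
        DM_BOUNDARY_GHOSTED:   Xs = -2, IXs = 0,  Xe = IXe = 7   (ghost slots exist but are not scattered into)
        DM_BOUNDARY_PERIODIC:  Xs = IXs = -2,     Xe = IXe = 7   (ghost slots wrap around and are filled)
  */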
  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);
  dd->nlocal = dof*(Xe-Xs);
  VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);

  VecGetOwnershipRange(global,&start,NULL);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);

  PetscMalloc1(x+2*sDist,&idx);
  PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {       /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) {       /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Next handle the mirror boundary case */
    for (i=0; i<(sDist); i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<(sDist); i++) {  /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else {  /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++] = xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++] = i;
    }
  }
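  /*
     Illustrative comment (added, not in the original source): with sDist = 2, the two left
     ghost slots of the rank owning xs = 0 receive global indices M-2, M-1 in the periodic
     case (wrap-around) and 2, 1 in the mirror case (reflection about point 0, excluding the
     boundary point itself); the -1 entries prepended above are skipped by the block IS below
     because it starts at &idx[IXs-Xs].
  */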
  ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);
  VecScatterCreate(global,from,local,to,&gtol);
  PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);
  ISDestroy(&to);
  ISDestroy(&from);
  VecDestroy(&local);
  VecDestroy(&global);

  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);
  PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);

  return 0;
}
/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
        DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (that is, the number of grid points); it can also be
        set from the command line with -da_grid_x <M>
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have length equal to the number of processes in the MPI_Comm,
        and the sum of its entries must equal M.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector(), and with calls to VecDuplicate() if more are needed.

   You must call DMSetUp() after this call before using this DM.

   If you wish to use the options database to change values in the DMDA, call DMSetFromOptions() after this call
   but before DMSetUp().

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges(),
          DMStagCreate1d()
@*/
PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscMPIInt size;

  DMDACreate(comm, da);
  DMSetDimension(*da, 1);
  DMDASetSizes(*da, M, 1, 1);
  MPI_Comm_size(comm, &size);
  DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
  DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);
  DMDASetDof(*da, dof);
  DMDASetStencilWidth(*da, s);
  DMDASetOwnershipRanges(*da, lx, NULL, NULL);
  return 0;
}
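
The manual page above describes the public entry point; what follows is a minimal usage sketch (not part of da1.c) showing how DMDACreate1d() is typically combined with DMSetFromOptions(), DMSetUp(), and the vector routines mentioned in the Notes. The grid size (64), periodic boundary, dof (2), and stencil width (1) are illustrative choices, and error checking is omitted to match the style of the listing above.

  #include <petscdmda.h>

  int main(int argc,char **argv)
  {
    DM  da;
    Vec g,l;

    PetscInitialize(&argc,&argv,NULL,NULL);
    /* 1d periodic grid with 64 points, 2 dof per point, stencil width 1 */
    DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,64,2,1,NULL,&da);
    DMSetFromOptions(da);                        /* honor -da_grid_x, -da_refine, ... */
    DMSetUp(da);                                 /* required before the DM can be used */
    DMCreateGlobalVector(da,&g);
    DMCreateLocalVector(da,&l);
    VecSet(g,1.0);
    DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);  /* fill the ghost points of l */
    DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);
    VecDestroy(&l);
    VecDestroy(&g);
    DMDestroy(&da);
    PetscFinalize();
    return 0;
  }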