Actual source code: da2.c
1: #include <petsc/private/dmdaimpl.h>
2: #include <petscdraw.h>
4: static PetscErrorCode DMView_DA_2d(DM da, PetscViewer viewer)
5: {
6: PetscMPIInt rank;
7: PetscBool iascii, isdraw, isglvis, isbinary;
8: DM_DA *dd = (DM_DA *)da->data;
9: #if defined(PETSC_HAVE_MATLAB)
10: PetscBool ismatlab;
11: #endif
13: PetscFunctionBegin;
14: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));
16: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
17: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
18: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
19: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
20: #if defined(PETSC_HAVE_MATLAB)
21: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
22: #endif
23: if (iascii) {
24: PetscViewerFormat format;
26: PetscCall(PetscViewerGetFormat(viewer, &format));
27: if (format == PETSC_VIEWER_LOAD_BALANCE) {
28: PetscInt i, nmax = 0, nmin = PETSC_MAX_INT, navg = 0, *nz, nzlocal;
29: DMDALocalInfo info;
30: PetscMPIInt size;
31: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
32: PetscCall(DMDAGetLocalInfo(da, &info));
33: nzlocal = info.xm * info.ym;
34: PetscCall(PetscMalloc1(size, &nz));
35: PetscCallMPI(MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da)));
36: for (i = 0; i < (PetscInt)size; i++) {
37: nmax = PetscMax(nmax, nz[i]);
38: nmin = PetscMin(nmin, nz[i]);
39: navg += nz[i];
40: }
41: PetscCall(PetscFree(nz));
42: navg = navg / size;
43: PetscCall(PetscViewerASCIIPrintf(viewer, " Load Balance - Grid Points: Min %" PetscInt_FMT " avg %" PetscInt_FMT " max %" PetscInt_FMT "\n", nmin, navg, nmax));
44: PetscFunctionReturn(PETSC_SUCCESS);
45: }
46: if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
47: DMDALocalInfo info;
48: PetscCall(DMDAGetLocalInfo(da, &info));
49: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
50: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " N %" PetscInt_FMT " m %" PetscInt_FMT " n %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->N, dd->m, dd->n, dd->w, dd->s));
51: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT ", Y range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm, info.ys, info.ys + info.ym));
52: PetscCall(PetscViewerFlush(viewer));
53: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
54: } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da, viewer));
55: else PetscCall(DMView_DA_VTK(da, viewer));
56: } else if (isdraw) {
57: PetscDraw draw;
58: double ymin = -1 * dd->s - 1, ymax = dd->N + dd->s;
59: double xmin = -1 * dd->s - 1, xmax = dd->M + dd->s;
60: double x, y;
61: PetscInt base;
62: const PetscInt *idx;
63: char node[10];
64: PetscBool isnull;
66: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
67: PetscCall(PetscDrawIsNull(draw, &isnull));
68: if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
70: PetscCall(PetscDrawCheckResizedWindow(draw));
71: PetscCall(PetscDrawClear(draw));
72: PetscCall(PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax));
74: PetscDrawCollectiveBegin(draw);
75: /* first processor draws all node lines */
76: if (rank == 0) {
77: ymin = 0.0;
78: ymax = dd->N - 1;
79: for (xmin = 0; xmin < dd->M; xmin++) PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_BLACK));
80: xmin = 0.0;
81: xmax = dd->M - 1;
82: for (ymin = 0; ymin < dd->N; ymin++) PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK));
83: }
84: PetscDrawCollectiveEnd(draw);
85: PetscCall(PetscDrawFlush(draw));
86: PetscCall(PetscDrawPause(draw));
88: PetscDrawCollectiveBegin(draw);
89: /* draw my box */
90: xmin = dd->xs / dd->w;
91: xmax = (dd->xe - 1) / dd->w;
92: ymin = dd->ys;
93: ymax = dd->ye - 1;
94: PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED));
95: PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED));
96: PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED));
97: PetscCall(PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED));
98: /* put in numbers */
99: base = (dd->base) / dd->w;
100: for (y = ymin; y <= ymax; y++) {
101: for (x = xmin; x <= xmax; x++) {
102: PetscCall(PetscSNPrintf(node, sizeof(node), "%d", (int)base++));
103: PetscCall(PetscDrawString(draw, x, y, PETSC_DRAW_BLACK, node));
104: }
105: }
106: PetscDrawCollectiveEnd(draw);
107: PetscCall(PetscDrawFlush(draw));
108: PetscCall(PetscDrawPause(draw));
110: PetscDrawCollectiveBegin(draw);
111: /* overlay ghost numbers, useful for error checking */
112: PetscCall(ISLocalToGlobalMappingGetBlockIndices(da->ltogmap, &idx));
113: base = 0;
114: xmin = dd->Xs;
115: xmax = dd->Xe;
116: ymin = dd->Ys;
117: ymax = dd->Ye;
118: for (y = ymin; y < ymax; y++) {
119: for (x = xmin; x < xmax; x++) {
120: if ((base % dd->w) == 0) {
121: PetscCall(PetscSNPrintf(node, sizeof(node), "%d", (int)(idx[base / dd->w])));
122: PetscCall(PetscDrawString(draw, x / dd->w, y, PETSC_DRAW_BLUE, node));
123: }
124: base++;
125: }
126: }
127: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(da->ltogmap, &idx));
128: PetscDrawCollectiveEnd(draw);
129: PetscCall(PetscDrawFlush(draw));
130: PetscCall(PetscDrawPause(draw));
131: PetscCall(PetscDrawSave(draw));
132: } else if (isglvis) {
133: PetscCall(DMView_DA_GLVis(da, viewer));
134: } else if (isbinary) {
135: PetscCall(DMView_DA_Binary(da, viewer));
136: #if defined(PETSC_HAVE_MATLAB)
137: } else if (ismatlab) {
138: PetscCall(DMView_DA_Matlab(da, viewer));
139: #endif
140: }
141: PetscFunctionReturn(PETSC_SUCCESS);
142: }
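/*
  Illustrative only, not part of da2.c: a minimal sketch (assuming `da` is an already
  set-up DMDA) of how the viewer above is typically reached. Pushing the
  PETSC_VIEWER_LOAD_BALANCE format on an ASCII viewer selects the min/avg/max
  grid-point report computed at the top of DMView_DA_2d(). The function name is a
  placeholder.
*/
#if 0
static PetscErrorCode ExampleViewLoadBalance(DM da)
{
  PetscFunctionBegin;
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_LOAD_BALANCE));
  PetscCall(DMView(da, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif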
144: #if defined(new)
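/* NOTE: the macro 'new' appears never to be defined, so the experimental sketch below is normally compiled out. */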
145: /*
146: DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix-free matrix where the local
147: function lives on a DMDA
149: y ~= (F(u + ha) - F(u))/h,
150: where F = nonlinear function, as set by SNESSetFunction()
151: u = current iterate
152: h = difference interval
153: */
154: PetscErrorCode DMDAGetDiagonal_MFFD(DM da, Vec U, Vec a)
155: {
156: PetscScalar h, *aa, *ww, v;
157: PetscReal epsilon = PETSC_SQRT_MACHINE_EPSILON, umin = 100.0 * PETSC_SQRT_MACHINE_EPSILON;
158: PetscInt gI, nI;
159: MatStencil stencil;
160: DMDALocalInfo info;
162: PetscFunctionBegin;
163: PetscCall((*ctx->func)(0, U, a, ctx->funcctx));
164: PetscCall((*ctx->funcisetbase)(U, ctx->funcctx));
166: PetscCall(VecGetArray(U, &ww));
167: PetscCall(VecGetArray(a, &aa));
169: nI = 0;
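  /* Choose the differencing parameter h: start from the current iterate value, clamp its
     magnitude away from zero (umin) while preserving its sign, then scale by
     sqrt(machine epsilon) -- the usual matrix-free finite-difference heuristic. */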
170: h = ww[gI];
171: if (h == 0.0) h = 1.0;
172: if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0) h = umin;
173: else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
174: h *= epsilon;
176: ww[gI] += h;
177: PetscCall((*ctx->funci)(i, w, &v, ctx->funcctx));
178: aa[nI] = (v - aa[nI]) / h;
179: ww[gI] -= h;
180: nI++;
182: PetscCall(VecRestoreArray(U, &ww));
183: PetscCall(VecRestoreArray(a, &aa));
184: PetscFunctionReturn(PETSC_SUCCESS);
185: }
186: #endif
188: PetscErrorCode DMSetUp_DA_2D(DM da)
189: {
190: DM_DA *dd = (DM_DA *)da->data;
191: const PetscInt M = dd->M;
192: const PetscInt N = dd->N;
193: PetscInt m = dd->m;
194: PetscInt n = dd->n;
195: const PetscInt dof = dd->w;
196: const PetscInt s = dd->s;
197: DMBoundaryType bx = dd->bx;
198: DMBoundaryType by = dd->by;
199: DMDAStencilType stencil_type = dd->stencil_type;
200: PetscInt *lx = dd->lx;
201: PetscInt *ly = dd->ly;
202: MPI_Comm comm;
203: PetscMPIInt rank, size;
204: PetscInt xs, xe, ys, ye, x, y, Xs, Xe, Ys, Ye, IXs, IXe, IYs, IYe;
205: PetscInt up, down, left, right, i, n0, n1, n2, n3, n5, n6, n7, n8, *idx, nn;
206: PetscInt xbase, *bases, *ldims, j, x_t, y_t, s_t, base, count;
207: PetscInt s_x, s_y; /* s proportionalized to w */
208: PetscInt sn0 = 0, sn2 = 0, sn6 = 0, sn8 = 0;
209: Vec local, global;
210: VecScatter gtol;
211: IS to, from;
213: PetscFunctionBegin;
214: PetscCheck(stencil_type != DMDA_STENCIL_BOX || (bx != DM_BOUNDARY_MIRROR && by != DM_BOUNDARY_MIRROR), PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Mirror boundary and box stencil");
215: PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
216: #if !defined(PETSC_USE_64BIT_INDICES)
217: PetscCheck(((PetscInt64)M) * ((PetscInt64)N) * ((PetscInt64)dof) <= (PetscInt64)PETSC_MPI_INT_MAX, comm, PETSC_ERR_INT_OVERFLOW, "Mesh of %" PetscInt_FMT " by %" PetscInt_FMT " by %" PetscInt_FMT " (dof) is too large for 32-bit indices", M, N, dof);
218: #endif
220: PetscCallMPI(MPI_Comm_size(comm, &size));
221: PetscCallMPI(MPI_Comm_rank(comm, &rank));
223: dd->p = 1;
224: if (m != PETSC_DECIDE) {
225: PetscCheck(m >= 1, comm, PETSC_ERR_ARG_OUTOFRANGE, "Non-positive number of processors in X direction: %" PetscInt_FMT, m);
226: PetscCheck(m <= size, comm, PETSC_ERR_ARG_OUTOFRANGE, "Too many processors in X direction: %" PetscInt_FMT " %d", m, size);
227: }
228: if (n != PETSC_DECIDE) {
229: PetscCheck(n >= 1, comm, PETSC_ERR_ARG_OUTOFRANGE, "Non-positive number of processors in Y direction: %" PetscInt_FMT, n);
230: PetscCheck(n <= size, comm, PETSC_ERR_ARG_OUTOFRANGE, "Too many processors in Y direction: %" PetscInt_FMT " %d", n, size);
231: }
233: if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
234: if (n != PETSC_DECIDE) {
235: m = size / n;
236: } else if (m != PETSC_DECIDE) {
237: n = size / m;
238: } else {
239: /* try for squarish distribution */
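      /* e.g., M = 100, N = 50 on 8 ranks: m = (PetscInt)(0.5 + sqrt(100*8/50)) = 4, and the
         loop below finds n = 8/4 = 2 with m*n == size on the first try */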
240: m = (PetscInt)(0.5 + PetscSqrtReal(((PetscReal)M) * ((PetscReal)size) / ((PetscReal)N)));
241: if (!m) m = 1;
242: while (m > 0) {
243: n = size / m;
244: if (m * n == size) break;
245: m--;
246: }
247: if (M > N && m < n) {
248: PetscInt _m = m;
249: m = n;
250: n = _m;
251: }
252: }
253: PetscCheck(m * n == size, comm, PETSC_ERR_PLIB, "Unable to create partition, check the size of the communicator and input m and n ");
254: } else PetscCheck(m * n == size, comm, PETSC_ERR_ARG_OUTOFRANGE, "Given Bad partition");
256: PetscCheck(M >= m, comm, PETSC_ERR_ARG_OUTOFRANGE, "Partition in x direction is too fine! %" PetscInt_FMT " %" PetscInt_FMT, M, m);
257: PetscCheck(N >= n, comm, PETSC_ERR_ARG_OUTOFRANGE, "Partition in y direction is too fine! %" PetscInt_FMT " %" PetscInt_FMT, N, n);
259: /*
260: Determine locally owned region
261: xs is the first local node number, x is the number of local nodes
262: */
263: if (!lx) {
264: PetscCall(PetscMalloc1(m, &dd->lx));
265: lx = dd->lx;
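    /* default ownership split: each of the m process columns gets floor(M/m) nodes,
       and the first M % m columns get one extra */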
266: for (i = 0; i < m; i++) lx[i] = M / m + ((M % m) > i);
267: }
268: x = lx[rank % m];
269: xs = 0;
270: for (i = 0; i < (rank % m); i++) xs += lx[i];
271: if (PetscDefined(USE_DEBUG)) {
272: left = xs;
273: for (i = (rank % m); i < m; i++) left += lx[i];
274: PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M: %" PetscInt_FMT " %" PetscInt_FMT, left, M);
275: }
277: /*
278: Determine locally owned region
279: ys is the first local node number, y is the number of local nodes
280: */
281: if (!ly) {
282: PetscCall(PetscMalloc1(n, &dd->ly));
283: ly = dd->ly;
284: for (i = 0; i < n; i++) ly[i] = N / n + ((N % n) > i);
285: }
286: y = ly[rank / m];
287: ys = 0;
288: for (i = 0; i < (rank / m); i++) ys += ly[i];
289: if (PetscDefined(USE_DEBUG)) {
290: left = ys;
291: for (i = (rank / m); i < n; i++) left += ly[i];
292: PetscCheck(left == N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of ly across processors not equal to N: %" PetscInt_FMT " %" PetscInt_FMT, left, N);
293: }
295: /*
296: check if the scatter requires more than one process neighbor or wraps around
297: the domain more than once
298: */
299: PetscCheck((x >= s) || ((m <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);
300: PetscCheck((y >= s) || ((n <= 1) && (by != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local y-width of domain y %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, y, s);
301: PetscCheck((x > s) || ((bx != DM_BOUNDARY_MIRROR)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT " with mirror", x, s);
302: PetscCheck((y > s) || ((by != DM_BOUNDARY_MIRROR)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local y-width of domain y %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT " with mirror", y, s);
303: xe = xs + x;
304: ye = ys + y;
306: /* determine ghost region (Xs) and region scattered into (IXs) */
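  /* Xs..Xe is the full footprint of the ghosted local patch (what the local vector stores),
     while IXs..IXe is the part the global-to-local scatter actually fills; for a ghosted but
     non-periodic, non-mirror boundary the extra slots exist in memory but are not scattered into */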
307: if (xs - s > 0) {
308: Xs = xs - s;
309: IXs = xs - s;
310: } else {
311: if (bx) {
312: Xs = xs - s;
313: } else {
314: Xs = 0;
315: }
316: IXs = 0;
317: }
318: if (xe + s <= M) {
319: Xe = xe + s;
320: IXe = xe + s;
321: } else {
322: if (bx) {
323: Xs = xs - s;
324: Xe = xe + s;
325: } else {
326: Xe = M;
327: }
328: IXe = M;
329: }
331: if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
332: IXs = xs - s;
333: IXe = xe + s;
334: Xs = xs - s;
335: Xe = xe + s;
336: }
338: if (ys - s > 0) {
339: Ys = ys - s;
340: IYs = ys - s;
341: } else {
342: if (by) {
343: Ys = ys - s;
344: } else {
345: Ys = 0;
346: }
347: IYs = 0;
348: }
349: if (ye + s <= N) {
350: Ye = ye + s;
351: IYe = ye + s;
352: } else {
353: if (by) {
354: Ye = ye + s;
355: } else {
356: Ye = N;
357: }
358: IYe = N;
359: }
361: if (by == DM_BOUNDARY_PERIODIC || by == DM_BOUNDARY_MIRROR) {
362: IYs = ys - s;
363: IYe = ye + s;
364: Ys = ys - s;
365: Ye = ye + s;
366: }
368: /* stencil length in each direction */
369: s_x = s;
370: s_y = s;
372: /* determine starting point of each processor */
373: nn = x * y;
374: PetscCall(PetscMalloc2(size + 1, &bases, size, &ldims));
375: PetscCallMPI(MPI_Allgather(&nn, 1, MPIU_INT, ldims, 1, MPIU_INT, comm));
376: bases[0] = 0;
377: for (i = 1; i <= size; i++) bases[i] = ldims[i - 1];
378: for (i = 1; i <= size; i++) bases[i] += bases[i - 1];
379: base = bases[rank] * dof;
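  /* bases[] is now the exclusive prefix sum of the per-rank node counts, so base is this
     rank's first global index measured in degrees of freedom */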
381: /* allocate the base parallel and sequential vectors */
382: dd->Nlocal = x * y * dof;
383: PetscCall(VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global));
384: dd->nlocal = (Xe - Xs) * (Ye - Ys) * dof;
385: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local));
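  /* both vectors are created with NULL arrays: they only serve as layout templates for
     building the global-to-local scatter and are destroyed once the scatter exists */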
387: /* generate global to local vector scatter and local to global mapping */
389: /* global to local must include ghost points within the domain,
390: but not ghost points outside the domain that aren't periodic */
391: PetscCall(PetscMalloc1((IXe - IXs) * (IYe - IYs), &idx));
392: if (stencil_type == DMDA_STENCIL_BOX) {
393: left = IXs - Xs;
394: right = left + (IXe - IXs);
395: down = IYs - Ys;
396: up = down + (IYe - IYs);
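    /* left/right/down/up are offsets of the scattered-into region inside the ghosted patch;
       idx[] enumerates its points in the patch's row-major ordering, stride Xe - Xs per row */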
397: count = 0;
398: for (i = down; i < up; i++) {
399: for (j = left; j < right; j++) idx[count++] = j + i * (Xe - Xs);
400: }
401: PetscCall(ISCreateBlock(comm, dof, count, idx, PETSC_OWN_POINTER, &to));
403: } else {
404: /* must drop into cross shape region */
405: /*       ---------|
406:         |  top    |
407:      |---         ---| up
408:      |   middle      |
409:      |               |
410:      ----         ---- down
411:         | bottom  |
412:         -----------
413:      Xs xs        xe  Xe */
414: left = xs - Xs;
415: right = left + x;
416: down = ys - Ys;
417: up = down + y;
418: count = 0;
419: /* bottom */
420: for (i = (IYs - Ys); i < down; i++) {
421: for (j = left; j < right; j++) idx[count++] = j + i * (Xe - Xs);
422: }
423: /* middle */
424: for (i = down; i < up; i++) {
425: for (j = (IXs - Xs); j < (IXe - Xs); j++) idx[count++] = j + i * (Xe - Xs);
426: }
427: /* top */
428: for (i = up; i < up + IYe - ye; i++) {
429: for (j = left; j < right; j++) idx[count++] = j + i * (Xe - Xs);
430: }
431: PetscCall(ISCreateBlock(comm, dof, count, idx, PETSC_OWN_POINTER, &to));
432: }
434: /* determine who lies on each side of us stored in  n6 n7 n8
435:                                                      n3    n5
436:                                                      n0 n1 n2
437: */
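  /* e.g., on an m = 3 by n = 2 process grid, rank 4 (top row, middle column) gets
     n0 = 0, n1 = 1, n2 = 2 below, n3 = 3 and n5 = 5 beside it, and n6 = n7 = n8 = -1
     above, unless the periodic corrections below reassign them */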
439: /* Assume the Non-Periodic Case */
440: n1 = rank - m;
441: if (rank % m) {
442: n0 = n1 - 1;
443: } else {
444: n0 = -1;
445: }
446: if ((rank + 1) % m) {
447: n2 = n1 + 1;
448: n5 = rank + 1;
449: n8 = rank + m + 1;
450: if (n8 >= m * n) n8 = -1;
451: } else {
452: n2 = -1;
453: n5 = -1;
454: n8 = -1;
455: }
456: if (rank % m) {
457: n3 = rank - 1;
458: n6 = n3 + m;
459: if (n6 >= m * n) n6 = -1;
460: } else {
461: n3 = -1;
462: n6 = -1;
463: }
464: n7 = rank + m;
465: if (n7 >= m * n) n7 = -1;
467: if (bx == DM_BOUNDARY_PERIODIC && by == DM_BOUNDARY_PERIODIC) {
468: /* Modify for Periodic Cases */
469: /* Handle all four corners */
470: if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m - 1;
471: if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
472: if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size - m;
473: if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size - 1;
475: /* Handle Top and Bottom Sides */
476: if (n1 < 0) n1 = rank + m * (n - 1);
477: if (n7 < 0) n7 = rank - m * (n - 1);
478: if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
479: if ((n3 >= 0) && (n6 < 0)) n6 = (rank % m) - 1;
480: if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
481: if ((n5 >= 0) && (n8 < 0)) n8 = (rank % m) + 1;
483: /* Handle Left and Right Sides */
484: if (n3 < 0) n3 = rank + (m - 1);
485: if (n5 < 0) n5 = rank - (m - 1);
486: if ((n1 >= 0) && (n0 < 0)) n0 = rank - 1;
487: if ((n1 >= 0) && (n2 < 0)) n2 = rank - 2 * m + 1;
488: if ((n7 >= 0) && (n6 < 0)) n6 = rank + 2 * m - 1;
489: if ((n7 >= 0) && (n8 < 0)) n8 = rank + 1;
490: } else if (by == DM_BOUNDARY_PERIODIC) { /* Handle Top and Bottom Sides */
491: if (n1 < 0) n1 = rank + m * (n - 1);
492: if (n7 < 0) n7 = rank - m * (n - 1);
493: if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
494: if ((n3 >= 0) && (n6 < 0)) n6 = (rank % m) - 1;
495: if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
496: if ((n5 >= 0) && (n8 < 0)) n8 = (rank % m) + 1;
497: } else if (bx == DM_BOUNDARY_PERIODIC) { /* Handle Left and Right Sides */
498: if (n3 < 0) n3 = rank + (m - 1);
499: if (n5 < 0) n5 = rank - (m - 1);
500: if ((n1 >= 0) && (n0 < 0)) n0 = rank - 1;
501: if ((n1 >= 0) && (n2 < 0)) n2 = rank - 2 * m + 1;
502: if ((n7 >= 0) && (n6 < 0)) n6 = rank + 2 * m - 1;
503: if ((n7 >= 0) && (n8 < 0)) n8 = rank + 1;
504: }
506: PetscCall(PetscMalloc1(9, &dd->neighbors));
508: dd->neighbors[0] = n0;
509: dd->neighbors[1] = n1;
510: dd->neighbors[2] = n2;
511: dd->neighbors[3] = n3;
512: dd->neighbors[4] = rank;
513: dd->neighbors[5] = n5;
514: dd->neighbors[6] = n6;
515: dd->neighbors[7] = n7;
516: dd->neighbors[8] = n8;
518: if (stencil_type == DMDA_STENCIL_STAR) {
519: /* save corner processor numbers */
520: sn0 = n0;
521: sn2 = n2;
522: sn6 = n6;
523: sn8 = n8;
524: n0 = n2 = n6 = n8 = -1;
525: }
527: PetscCall(PetscMalloc1((Xe - Xs) * (Ye - Ys), &idx));
529: nn = 0;
530: xbase = bases[rank];
531: for (i = 1; i <= s_y; i++) {
532: if (n0 >= 0) { /* left below */
533: x_t = lx[n0 % m];
534: y_t = ly[(n0 / m)];
535: s_t = bases[n0] + x_t * y_t - (s_y - i) * x_t - s_x;
536: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
537: }
539: if (n1 >= 0) { /* directly below */
540: x_t = x;
541: y_t = ly[(n1 / m)];
542: s_t = bases[n1] + x_t * y_t - (s_y + 1 - i) * x_t;
543: for (j = 0; j < x_t; j++) idx[nn++] = s_t++;
544: } else if (by == DM_BOUNDARY_MIRROR) {
545: for (j = 0; j < x; j++) idx[nn++] = bases[rank] + x * (s_y - i + 1) + j;
546: }
548: if (n2 >= 0) { /* right below */
549: x_t = lx[n2 % m];
550: y_t = ly[(n2 / m)];
551: s_t = bases[n2] + x_t * y_t - (s_y + 1 - i) * x_t;
552: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
553: }
554: }
556: for (i = 0; i < y; i++) {
557: if (n3 >= 0) { /* directly left */
558: x_t = lx[n3 % m];
559: /* y_t = y; */
560: s_t = bases[n3] + (i + 1) * x_t - s_x;
561: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
562: } else if (bx == DM_BOUNDARY_MIRROR) {
563: for (j = 0; j < s_x; j++) idx[nn++] = bases[rank] + x * i + s_x - j;
564: }
566: for (j = 0; j < x; j++) idx[nn++] = xbase++; /* interior */
568: if (n5 >= 0) { /* directly right */
569: x_t = lx[n5 % m];
570: /* y_t = y; */
571: s_t = bases[n5] + (i)*x_t;
572: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
573: } else if (bx == DM_BOUNDARY_MIRROR) {
574: for (j = 0; j < s_x; j++) idx[nn++] = bases[rank] + x * (i + 1) - 2 - j;
575: }
576: }
578: for (i = 1; i <= s_y; i++) {
579: if (n6 >= 0) { /* left above */
580: x_t = lx[n6 % m];
581: /* y_t = ly[(n6/m)]; */
582: s_t = bases[n6] + (i)*x_t - s_x;
583: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
584: }
586: if (n7 >= 0) { /* directly above */
587: x_t = x;
588: /* y_t = ly[(n7/m)]; */
589: s_t = bases[n7] + (i - 1) * x_t;
590: for (j = 0; j < x_t; j++) idx[nn++] = s_t++;
591: } else if (by == DM_BOUNDARY_MIRROR) {
592: for (j = 0; j < x; j++) idx[nn++] = bases[rank] + x * (y - i - 1) + j;
593: }
595: if (n8 >= 0) { /* right above */
596: x_t = lx[n8 % m];
597: /* y_t = ly[(n8/m)]; */
598: s_t = bases[n8] + (i - 1) * x_t;
599: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
600: }
601: }
603: PetscCall(ISCreateBlock(comm, dof, nn, idx, PETSC_USE_POINTER, &from));
604: PetscCall(VecScatterCreate(global, from, local, to, &gtol));
605: PetscCall(ISDestroy(&to));
606: PetscCall(ISDestroy(&from));
608: if (stencil_type == DMDA_STENCIL_STAR) {
609: n0 = sn0;
610: n2 = sn2;
611: n6 = sn6;
612: n8 = sn8;
613: }
615: if (((stencil_type == DMDA_STENCIL_STAR) || (bx && bx != DM_BOUNDARY_PERIODIC) || (by && by != DM_BOUNDARY_PERIODIC))) {
616: /*
617: Recompute the local to global mappings, this time keeping the
618: information about the cross corner processor numbers and any ghosted
619: but not periodic indices.
620: */
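    /* ghosted-but-not-periodic slots are given a global index of -1 below; PETSc ignores
       negative indices in VecSetValues()/MatSetValues(), so values "set" at such local
       locations through this mapping are silently dropped */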
621: nn = 0;
622: xbase = bases[rank];
623: for (i = 1; i <= s_y; i++) {
624: if (n0 >= 0) { /* left below */
625: x_t = lx[n0 % m];
626: y_t = ly[(n0 / m)];
627: s_t = bases[n0] + x_t * y_t - (s_y - i) * x_t - s_x;
628: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
629: } else if (xs - Xs > 0 && ys - Ys > 0) {
630: for (j = 0; j < s_x; j++) idx[nn++] = -1;
631: }
632: if (n1 >= 0) { /* directly below */
633: x_t = x;
634: y_t = ly[(n1 / m)];
635: s_t = bases[n1] + x_t * y_t - (s_y + 1 - i) * x_t;
636: for (j = 0; j < x_t; j++) idx[nn++] = s_t++;
637: } else if (ys - Ys > 0) {
638: if (by == DM_BOUNDARY_MIRROR) {
639: for (j = 0; j < x; j++) idx[nn++] = bases[rank] + x * (s_y - i + 1) + j;
640: } else {
641: for (j = 0; j < x; j++) idx[nn++] = -1;
642: }
643: }
644: if (n2 >= 0) { /* right below */
645: x_t = lx[n2 % m];
646: y_t = ly[(n2 / m)];
647: s_t = bases[n2] + x_t * y_t - (s_y + 1 - i) * x_t;
648: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
649: } else if (Xe - xe > 0 && ys - Ys > 0) {
650: for (j = 0; j < s_x; j++) idx[nn++] = -1;
651: }
652: }
654: for (i = 0; i < y; i++) {
655: if (n3 >= 0) { /* directly left */
656: x_t = lx[n3 % m];
657: /* y_t = y; */
658: s_t = bases[n3] + (i + 1) * x_t - s_x;
659: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
660: } else if (xs - Xs > 0) {
661: if (bx == DM_BOUNDARY_MIRROR) {
662: for (j = 0; j < s_x; j++) idx[nn++] = bases[rank] + x * i + s_x - j;
663: } else {
664: for (j = 0; j < s_x; j++) idx[nn++] = -1;
665: }
666: }
668: for (j = 0; j < x; j++) idx[nn++] = xbase++; /* interior */
670: if (n5 >= 0) { /* directly right */
671: x_t = lx[n5 % m];
672: /* y_t = y; */
673: s_t = bases[n5] + (i)*x_t;
674: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
675: } else if (Xe - xe > 0) {
676: if (bx == DM_BOUNDARY_MIRROR) {
677: for (j = 0; j < s_x; j++) idx[nn++] = bases[rank] + x * (i + 1) - 2 - j;
678: } else {
679: for (j = 0; j < s_x; j++) idx[nn++] = -1;
680: }
681: }
682: }
684: for (i = 1; i <= s_y; i++) {
685: if (n6 >= 0) { /* left above */
686: x_t = lx[n6 % m];
687: /* y_t = ly[(n6/m)]; */
688: s_t = bases[n6] + (i)*x_t - s_x;
689: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
690: } else if (xs - Xs > 0 && Ye - ye > 0) {
691: for (j = 0; j < s_x; j++) idx[nn++] = -1;
692: }
693: if (n7 >= 0) { /* directly above */
694: x_t = x;
695: /* y_t = ly[(n7/m)]; */
696: s_t = bases[n7] + (i - 1) * x_t;
697: for (j = 0; j < x_t; j++) idx[nn++] = s_t++;
698: } else if (Ye - ye > 0) {
699: if (by == DM_BOUNDARY_MIRROR) {
700: for (j = 0; j < x; j++) idx[nn++] = bases[rank] + x * (y - i - 1) + j;
701: } else {
702: for (j = 0; j < x; j++) idx[nn++] = -1;
703: }
704: }
705: if (n8 >= 0) { /* right above */
706: x_t = lx[n8 % m];
707: /* y_t = ly[(n8/m)]; */
708: s_t = bases[n8] + (i - 1) * x_t;
709: for (j = 0; j < s_x; j++) idx[nn++] = s_t++;
710: } else if (Xe - xe > 0 && Ye - ye > 0) {
711: for (j = 0; j < s_x; j++) idx[nn++] = -1;
712: }
713: }
714: }
715: /*
716: Set the local to global ordering in the global vector, this allows use
717: of VecSetValuesLocal().
718: */
719: PetscCall(ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap));
721: PetscCall(PetscFree2(bases, ldims));
722: dd->m = m;
723: dd->n = n;
724: /* note petsc expects xs/xe/Xs/Xe to be multiplied by #dofs in many places */
725: dd->xs = xs * dof;
726: dd->xe = xe * dof;
727: dd->ys = ys;
728: dd->ye = ye;
729: dd->zs = 0;
730: dd->ze = 1;
731: dd->Xs = Xs * dof;
732: dd->Xe = Xe * dof;
733: dd->Ys = Ys;
734: dd->Ye = Ye;
735: dd->Zs = 0;
736: dd->Ze = 1;
738: PetscCall(VecDestroy(&local));
739: PetscCall(VecDestroy(&global));
741: dd->gtol = gtol;
742: dd->base = base;
743: da->ops->view = DMView_DA_2d;
744: dd->ltol = NULL;
745: dd->ao = NULL;
746: PetscFunctionReturn(PETSC_SUCCESS);
747: }
749: /*@C
750: DMDACreate2d - Creates an object that will manage the communication of two-dimensional
751: regular array data that is distributed across one or more MPI processes.
753: Collective
755: Input Parameters:
756: + comm - MPI communicator
757: . bx - type of ghost nodes the array has in the x direction. Use one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`.
758: . by - type of ghost nodes the array has in the y direction. Use one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`.
759: . stencil_type - stencil type. Use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
760: . M - global dimension in x direction of the array
761: . N - global dimension in y direction of the array
762: . m - corresponding number of processors in the x dimension (or `PETSC_DECIDE` to have it calculated)
763: . n - corresponding number of processors in the y dimension (or `PETSC_DECIDE` to have it calculated)
764: . dof - number of degrees of freedom per node
765: . s - stencil width
766: . lx - array containing the number of nodes owned by each process in the x direction, or `NULL`.
767: - ly - array containing the number of nodes owned by each process in the y direction, or `NULL`.
769: Output Parameter:
770: . da - the resulting distributed array object
772: Options Database Keys:
773: + -dm_view - Calls `DMView()` at the conclusion of `DMDACreate2d()`
774: . -da_grid_x <nx> - number of grid points in x direction
775: . -da_grid_y <ny> - number of grid points in y direction
776: . -da_processors_x <nx> - number of processors in x direction
777: . -da_processors_y <ny> - number of processors in y direction
778: . -da_refine_x <rx> - refinement ratio in x direction
779: . -da_refine_y <ry> - refinement ratio in y direction
780: - -da_refine <n> - refine the `DMDA` n times before creating
782: Level: beginner
784: Notes:
785: If `lx` or `ly` is non-null, it must have length `m` or `n`, respectively, and the corresponding
786: `m` or `n` cannot be `PETSC_DECIDE`. The sum of the `lx` entries must be `M`, and the sum of
787: the `ly` entries must be `N`.
789: The stencil type `DMDA_STENCIL_STAR` with width 1 corresponds to the
790: standard 5-pt stencil, while `DMDA_STENCIL_BOX` with width 1 denotes
791: the standard 9-pt stencil.
793: The array data itself is NOT stored in the `DMDA`; it is stored in `Vec` objects.
794: The appropriate vector objects can be obtained with calls to `DMCreateGlobalVector()`
795: and `DMCreateLocalVector()`, with calls to `VecDuplicate()` if more are needed.
797: You must call `DMSetUp()` after this call before using this `DM`.
799: To use the options database to change values in the `DMDA` call `DMSetFromOptions()` after this call
800: but before `DMSetUp()`.
802: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDestroy()`, `DMView()`, `DMDACreate1d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDAGetRefinementFactor()`,
803: `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDASetRefinementFactor()`,
804: `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
805: `DMStagCreate2d()`, `DMBoundaryType`
806: @*/
807: PetscErrorCode DMDACreate2d(MPI_Comm comm, DMBoundaryType bx, DMBoundaryType by, DMDAStencilType stencil_type, PetscInt M, PetscInt N, PetscInt m, PetscInt n, PetscInt dof, PetscInt s, const PetscInt lx[], const PetscInt ly[], DM *da)
808: {
809: PetscFunctionBegin;
810: PetscCall(DMDACreate(comm, da));
811: PetscCall(DMSetDimension(*da, 2));
812: PetscCall(DMDASetSizes(*da, M, N, 1));
813: PetscCall(DMDASetNumProcs(*da, m, n, PETSC_DECIDE));
814: PetscCall(DMDASetBoundaryType(*da, bx, by, DM_BOUNDARY_NONE));
815: PetscCall(DMDASetDof(*da, dof));
816: PetscCall(DMDASetStencilType(*da, stencil_type));
817: PetscCall(DMDASetStencilWidth(*da, s));
818: PetscCall(DMDASetOwnershipRanges(*da, lx, ly, NULL));
819: PetscFunctionReturn(PETSC_SUCCESS);
820: }
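/*
  Illustrative only, not part of da2.c: a minimal sketch of a typical DMDACreate2d()
  call sequence. The grid sizes, boundary types, and stencil choices are placeholders,
  assuming one degree of freedom per node and a width-1 star stencil; the function name
  is hypothetical.
*/
#if 0
static PetscErrorCode ExampleCreate2d(void)
{
  DM da;

  PetscFunctionBegin;
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 8, 8, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da)); /* honor -da_grid_x, -da_processors_x, etc. */
  PetscCall(DMSetUp(da));          /* required before the DMDA can be used */
  /* ... create vectors with DMCreateGlobalVector()/DMCreateLocalVector() and compute ... */
  PetscCall(DMDestroy(&da));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif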