Actual source code: patch.c
#include <petsc/private/dmpatchimpl.h>
#include <petscdmda.h>
#include <petscsf.h>

/*
Solver loop to update \tau:

  DMZoom(dmc, &dmz)
  DMRefine(dmz, &dmf),
  Scatter Xcoarse -> Xzoom,
  Interpolate Xzoom -> Xfine (note that this may be on subcomms),
  Smooth Xfine using two-step smoother
    normal smoother plus Kaczmarz---moves back and forth from dmzoom to dmfine
  Compute residual Rfine
  Restrict Rfine to Rzoom_restricted
  Scatter Rzoom_restricted -> Rcoarse_restricted
  Compute global residual Rcoarse
  TauCoarse = Rcoarse - Rcoarse_restricted
*/
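
/*
  A minimal sketch (illustration only) of the final tau step above. The Vec names
  Rcoarse and RcoarseRestricted are hypothetical residuals on the coarse DM:

    Vec tau;

    PetscCall(VecDuplicate(Rcoarse, &tau));
    PetscCall(VecWAXPY(tau, -1.0, RcoarseRestricted, Rcoarse)); // tau = Rcoarse - RcoarseRestricted
    PetscCall(VecDestroy(&tau));
*/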

/*@C
  DMPatchZoom - Create patches of a `DMDA` on subsets of processes, indicated by `commz`

  Collective

  Input Parameters:
+ dm    - the `DM`
. lower - the lower left corner of the requested patch
. upper - the upper right corner of the requested patch
- commz - the new communicator for the patch, `MPI_COMM_NULL` indicates that the given rank will not own a patch

  Output Parameters:
+ dmz  - the patch `DM`
. sfz  - the `PetscSF` mapping the patch+halo to the zoomed version (optional; pass `NULL` to skip creating it)
- sfzr - the `PetscSF` mapping the patch to the restricted zoomed version

  Level: intermediate

.seealso: `DMPatchSolve()`, `DMDACreatePatchIS()`
@*/
PetscErrorCode DMPatchZoom(DM dm, MatStencil lower, MatStencil upper, MPI_Comm commz, DM *dmz, PetscSF *sfz, PetscSF *sfzr)
{
  DMDAStencilType st;
  MatStencil      blower, bupper, loclower, locupper;
  IS              is;
  const PetscInt *ranges, *indices;
  PetscInt       *localPoints  = NULL;
  PetscSFNode    *remotePoints = NULL;
  PetscInt        dim, dof;
  PetscInt        M, N, P, rM, rN, rP, halo = 1, sxb, syb, szb, sxr, syr, szr, exr, eyr, ezr, mxb, myb, mzb, i, j, k, l, q;
  PetscMPIInt     size;
  PetscBool       patchis_offproc = PETSC_TRUE;
  Vec             X;

  PetscFunctionBegin;
  if (!sfz) halo = 0;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  /* Create patch DM */
  PetscCall(DMDAGetInfo(dm, &dim, &M, &N, &P, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, &st));

  /* Get piece for rank r, expanded by halo */
  bupper.i = PetscMin(M, upper.i + halo);
  blower.i = PetscMax(lower.i - halo, 0);
  bupper.j = PetscMin(N, upper.j + halo);
  blower.j = PetscMax(lower.j - halo, 0);
  bupper.k = PetscMin(P, upper.k + halo);
  blower.k = PetscMax(lower.k - halo, 0);
  rM       = bupper.i - blower.i;
  rN       = bupper.j - blower.j;
  rP       = bupper.k - blower.k;

  if (commz != MPI_COMM_NULL) {
    PetscCall(DMDACreate(commz, dmz));
    PetscCall(DMSetDimension(*dmz, dim));
    PetscCall(DMDASetSizes(*dmz, rM, rN, rP));
    PetscCall(DMDASetNumProcs(*dmz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(DMDASetBoundaryType(*dmz, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
    PetscCall(DMDASetDof(*dmz, dof));
    PetscCall(DMDASetStencilType(*dmz, st));
    PetscCall(DMDASetStencilWidth(*dmz, 0));
    PetscCall(DMDASetOwnershipRanges(*dmz, NULL, NULL, NULL));
    PetscCall(DMSetFromOptions(*dmz));
    PetscCall(DMSetUp(*dmz));
    PetscCall(DMDAGetCorners(*dmz, &sxb, &syb, &szb, &mxb, &myb, &mzb));
    sxr = PetscMax(sxb, lower.i - blower.i);
    syr = PetscMax(syb, lower.j - blower.j);
    szr = PetscMax(szb, lower.k - blower.k);
    exr = PetscMin(sxb + mxb, upper.i - blower.i);
    eyr = PetscMin(syb + myb, upper.j - blower.j);
    ezr = PetscMin(szb + mzb, upper.k - blower.k);
    PetscCall(PetscMalloc2(dof * rM * rN * PetscMax(rP, 1), &localPoints, dof * rM * rN * PetscMax(rP, 1), &remotePoints));
  } else {
    sxr = syr = szr = exr = eyr = ezr = sxb = syb = szb = mxb = myb = mzb = 0;
  }

  /* Create SF for restricted map */
  PetscCall(DMCreateGlobalVector(dm, &X));
  PetscCall(VecGetOwnershipRanges(X, &ranges));

  loclower.i = blower.i + sxr;
  locupper.i = blower.i + exr;
  loclower.j = blower.j + syr;
  locupper.j = blower.j + eyr;
  loclower.k = blower.k + szr;
  locupper.k = blower.k + ezr;

  PetscCall(DMDACreatePatchIS(dm, &loclower, &locupper, &is, patchis_offproc));
  PetscCall(ISGetIndices(is, &indices));

  if (dim < 3) {
    mzb = 1;
    ezr = 1;
  }
  q = 0;
  for (k = szb; k < szb + mzb; ++k) {
    if ((k < szr) || (k >= ezr)) continue;
    for (j = syb; j < syb + myb; ++j) {
      if ((j < syr) || (j >= eyr)) continue;
      for (i = sxb; i < sxb + mxb; ++i) {
        for (l = 0; l < dof; l++) {
          const PetscInt lp = l + dof * (((k - szb) * rN + (j - syb)) * rM + i - sxb);
          PetscInt       r;

          if ((i < sxr) || (i >= exr)) continue;
          localPoints[q] = lp;
          PetscCall(PetscFindInt(indices[q], size + 1, ranges, &r));

          remotePoints[q].rank  = r < 0 ? -(r + 1) - 1 : r;
          remotePoints[q].index = indices[q] - ranges[remotePoints[q].rank];
          ++q;
        }
      }
    }
  }
  PetscCall(ISRestoreIndices(is, &indices));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfzr));
  PetscCall(PetscObjectSetName((PetscObject)*sfzr, "Restricted Map"));
  PetscCall(PetscSFSetGraph(*sfzr, dof * M * N * P, q, localPoints, PETSC_COPY_VALUES, remotePoints, PETSC_COPY_VALUES));

  if (sfz) {
    /* Create SF for buffered map */
    loclower.i = blower.i + sxb;
    locupper.i = blower.i + sxb + mxb;
    loclower.j = blower.j + syb;
    locupper.j = blower.j + syb + myb;
    loclower.k = blower.k + szb;
    locupper.k = blower.k + szb + mzb;

    PetscCall(DMDACreatePatchIS(dm, &loclower, &locupper, &is, patchis_offproc));
    PetscCall(ISGetIndices(is, &indices));

    q = 0;
    for (k = szb; k < szb + mzb; ++k) {
      for (j = syb; j < syb + myb; ++j) {
        for (i = sxb; i < sxb + mxb; ++i, ++q) {
          PetscInt r;

          localPoints[q] = q;
          PetscCall(PetscFindInt(indices[q], size + 1, ranges, &r));
          remotePoints[q].rank  = r < 0 ? -(r + 1) - 1 : r;
          remotePoints[q].index = indices[q] - ranges[remotePoints[q].rank];
        }
      }
    }
    PetscCall(ISRestoreIndices(is, &indices));
    PetscCall(ISDestroy(&is));
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfz));
    PetscCall(PetscObjectSetName((PetscObject)*sfz, "Buffered Map"));
    PetscCall(PetscSFSetGraph(*sfz, M * N * P, q, localPoints, PETSC_COPY_VALUES, remotePoints, PETSC_COPY_VALUES));
  }

  PetscCall(VecDestroy(&X));
  PetscCall(PetscFree2(localPoints, remotePoints));
  PetscFunctionReturn(PETSC_SUCCESS);
}
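
/*
  A hedged usage sketch for DMPatchZoom() (the DMDA "da" and the patch bounds are
  hypothetical; this is not code from the library):

    MatStencil lower = {.i = 0, .j = 0, .k = 0, .c = 0};
    MatStencil upper = {.i = 4, .j = 4, .k = 0, .c = 0};
    DM         dmz;
    PetscSF    sfz, sfzr;

    PetscCall(DMPatchZoom(da, lower, upper, PETSC_COMM_SELF, &dmz, &sfz, &sfzr));
    // ... broadcast coarse values with sfz, reduce patch values with sfzr, work on dmz ...
    PetscCall(PetscSFDestroy(&sfz));
    PetscCall(PetscSFDestroy(&sfzr));
    PetscCall(DMDestroy(&dmz));

  Passing NULL for sfz sets halo = 0 and skips building the buffered (patch+halo) map.
*/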

typedef enum {
  PATCH_COMM_TYPE_WORLD = 0,
  PATCH_COMM_TYPE_SELF  = 1
} PatchCommType;

PetscErrorCode DMPatchSolve(DM dm)
{
  MPI_Comm    comm;
  MPI_Comm    commz;
  DM          dmc;
  PetscSF     sfz, sfzr;
  Vec         XC;
  MatStencil  patchSize, commSize, gridRank, lower, upper;
  PetscInt    M, N, P, i, j, k, l, m, n, p = 0;
  PetscMPIInt rank, size;
  PetscInt    debug = 0;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPatchGetCoarse(dm, &dmc));
  PetscCall(DMPatchGetPatchSize(dm, &patchSize));
  PetscCall(DMPatchGetCommSize(dm, &commSize));
  PetscCall(DMGetGlobalVector(dmc, &XC));
  PetscCall(DMDAGetInfo(dmc, NULL, &M, &N, &P, &l, &m, &n, NULL, NULL, NULL, NULL, NULL, NULL));
  M = PetscMax(M, 1);
  l = PetscMax(l, 1);
  N = PetscMax(N, 1);
  m = PetscMax(m, 1);
  P = PetscMax(P, 1);
  n = PetscMax(n, 1);

  gridRank.i = rank % l;
  gridRank.j = rank / l % m;
  gridRank.k = rank / (l * m) % n;

  if (commSize.i * commSize.j * commSize.k == size || commSize.i * commSize.j * commSize.k == 0) {
    commSize.i = l;
    commSize.j = m;
    commSize.k = n;
    commz      = comm;
  } else if (commSize.i * commSize.j * commSize.k == 1) {
    commz = PETSC_COMM_SELF;
  } else {
    const PetscMPIInt newComm = ((gridRank.k / commSize.k) * (m / commSize.j) + gridRank.j / commSize.j) * (l / commSize.i) + (gridRank.i / commSize.i);
    const PetscMPIInt newRank = ((gridRank.k % commSize.k) * commSize.j + gridRank.j % commSize.j) * commSize.i + (gridRank.i % commSize.i);

    PetscCallMPI(MPI_Comm_split(comm, newComm, newRank, &commz));
    if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Rank %d color %d key %d commz %p\n", rank, newComm, newRank, (void *)(MPI_Aint)commz));
  }
  /*
   Assumptions:
     - patchSize divides gridSize
     - commSize divides gridSize
     - commSize divides l,m,n
   Ignore multiple patches per rank for now

   Multiple ranks per patch:
     - l,m,n divides patchSize
     - commSize divides patchSize
  */
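  /*
   A worked example of the MPI_Comm_split() above (an illustration, not part of the solver):
   with l = m = 4, n = 1 and commSize = (2,2,1), the 4x4 process grid is carved into 2x2
   blocks, so ranks {0,1,4,5} get color 0 and form one subcommunicator, ranks {2,3,6,7}
   get color 1, and so on; each subcommunicator then accepts the patches that fall in its
   block of the grid.
  */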
  for (k = 0; k < P; k += PetscMax(patchSize.k, 1)) {
    for (j = 0; j < N; j += PetscMax(patchSize.j, 1)) {
      for (i = 0; i < M; i += PetscMax(patchSize.i, 1), ++p) {
        MPI_Comm commp = MPI_COMM_NULL;
        DM       dmz   = NULL;
#if 0
        DM  dmf     = NULL;
        Mat interpz = NULL;
#endif
        Vec          XZ      = NULL;
        PetscScalar *xcarray = NULL;
        PetscScalar *xzarray = NULL;

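        /* Accept patch p only if it falls in this rank's subcommunicator block of the process grid */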
        if ((gridRank.k / commSize.k == p / (l / commSize.i * m / commSize.j) % n / commSize.k) && (gridRank.j / commSize.j == p / (l / commSize.i) % m / commSize.j) && (gridRank.i / commSize.i == p % l / commSize.i)) {
          if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Rank %d is accepting Patch %" PetscInt_FMT "\n", rank, p));
          commp = commz;
        }
        /* Zoom to coarse patch */
        lower.i = i;
        lower.j = j;
        lower.k = k;
        upper.i = i + patchSize.i;
        upper.j = j + patchSize.j;
        upper.k = k + patchSize.k;
        PetscCall(DMPatchZoom(dmc, lower, upper, commp, &dmz, &sfz, &sfzr));
        lower.c = 0; /* initialize member, otherwise compiler issues warnings */
        upper.c = 0; /* initialize member, otherwise compiler issues warnings */
        if (debug)
          PetscCall(PetscPrintf(comm, "Patch %" PetscInt_FMT ": (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")--(%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", p, lower.i, lower.j, lower.k, upper.i, upper.j, upper.k));
        if (dmz) PetscCall(DMView(dmz, PETSC_VIEWER_STDOUT_(commz)));
        PetscCall(PetscSFView(sfz, PETSC_VIEWER_STDOUT_(comm)));
        PetscCall(PetscSFView(sfzr, PETSC_VIEWER_STDOUT_(comm)));
        /* Scatter Xcoarse -> Xzoom */
        if (dmz) PetscCall(DMGetGlobalVector(dmz, &XZ));
        if (XZ) PetscCall(VecGetArray(XZ, &xzarray));
        PetscCall(VecGetArray(XC, &xcarray));
        PetscCall(PetscSFBcastBegin(sfz, MPIU_SCALAR, xcarray, xzarray, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(sfz, MPIU_SCALAR, xcarray, xzarray, MPI_REPLACE));
        PetscCall(VecRestoreArray(XC, &xcarray));
        if (XZ) PetscCall(VecRestoreArray(XZ, &xzarray));
#if 0
        /* Interpolate Xzoom -> Xfine, note that this may be on subcomms */
        PetscCall(DMRefine(dmz, MPI_COMM_NULL, &dmf));
        PetscCall(DMCreateInterpolation(dmz, dmf, &interpz, NULL));
        PetscCall(DMInterpolate(dmz, interpz, dmf));
        /* Smooth Xfine using two-step smoother, normal smoother plus Kaczmarz---moves back and forth from dmzoom to dmfine */
        /* Compute residual Rfine */
        /* Restrict Rfine to Rzoom_restricted */
#endif
        /* Scatter Rzoom_restricted -> Rcoarse_restricted */
        if (XZ) PetscCall(VecGetArray(XZ, &xzarray));
        PetscCall(VecGetArray(XC, &xcarray));
        PetscCall(PetscSFReduceBegin(sfzr, MPIU_SCALAR, xzarray, xcarray, MPIU_SUM));
        PetscCall(PetscSFReduceEnd(sfzr, MPIU_SCALAR, xzarray, xcarray, MPIU_SUM));
        PetscCall(VecRestoreArray(XC, &xcarray));
        if (XZ) PetscCall(VecRestoreArray(XZ, &xzarray));
        if (dmz) PetscCall(DMRestoreGlobalVector(dmz, &XZ));
        /* Compute global residual Rcoarse */
        /* TauCoarse = Rcoarse - Rcoarse_restricted */

        PetscCall(PetscSFDestroy(&sfz));
        PetscCall(PetscSFDestroy(&sfzr));
        PetscCall(DMDestroy(&dmz));
      }
    }
  }
  PetscCall(DMRestoreGlobalVector(dmc, &XC));
  PetscFunctionReturn(PETSC_SUCCESS);
}
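
/*
  A hedged usage sketch of the DMPATCH workflow (the coarse DMDA "dac" and both
  stencil values are hypothetical, and DMPatchCreate() is assumed to take the coarse
  DM plus the patchSize and commSize stencils, per its declaration in petscdmpatch.h):

    DM         dm;
    MatStencil patchSize = {.i = 4, .j = 4, .k = 0, .c = 0};
    MatStencil commSize  = {.i = 1, .j = 1, .k = 1, .c = 0};

    PetscCall(DMPatchCreate(PETSC_COMM_WORLD, dac, patchSize, commSize, &dm));
    PetscCall(DMPatchSolve(dm)); // runs the (currently skeletal) patch loop above
    PetscCall(DMDestroy(&dm));
*/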

static PetscErrorCode DMPatchView_ASCII(DM dm, PetscViewer viewer)
{
  DM_Patch         *mesh = (DM_Patch *)dm->data;
  PetscViewerFormat format;
  const char       *name;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer, &format));
  /* if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) */
  PetscCall(PetscObjectGetName((PetscObject)dm, &name));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Patch DM %s\n", name));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse DM\n"));
  PetscCall(DMView(mesh->dmCoarse, viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMView_Patch(DM dm, PetscViewer viewer)
{
  PetscBool iascii, isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  if (iascii) PetscCall(DMPatchView_ASCII(dm, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMDestroy_Patch(DM dm)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  if (--mesh->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMDestroy(&mesh->dmCoarse));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(mesh));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMSetUp_Patch(DM dm)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(DMSetUp(mesh->dmCoarse));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateGlobalVector_Patch(DM dm, Vec *g)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(DMCreateGlobalVector(mesh->dmCoarse, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateLocalVector_Patch(DM dm, Vec *l)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(DMCreateLocalVector(mesh->dmCoarse, l));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateSubDM_Patch(DM dm, PetscInt numFields, const PetscInt fields[], IS *is, DM *subdm)
{
  SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Tell me to code this");
}

PetscErrorCode DMPatchGetCoarse(DM dm, DM *dmCoarse)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  *dmCoarse = mesh->dmCoarse;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchGetPatchSize(DM dm, MatStencil *patchSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(patchSize, 2);
  *patchSize = mesh->patchSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchSetPatchSize(DM dm, MatStencil patchSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->patchSize = patchSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchGetCommSize(DM dm, MatStencil *commSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(commSize, 2);
  *commSize = mesh->commSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchSetCommSize(DM dm, MatStencil commSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->commSize = commSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}