Actual source code: plexdd.c
#include <petsc/private/dmpleximpl.h>
#include <petsc/private/dmlabelimpl.h>
#include <petsc/private/sectionimpl.h>
#include <petsc/private/sfimpl.h>
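
/* Transfer the auxiliary Vec holding material parameters (if any) from dm to the
   overlapping DM odm: the parameters are redistributed along the migration sf with
   DMPlexDistributeField() when sf is given, and reused directly otherwise */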
static PetscErrorCode DMTransferMaterialParameters(DM dm, PetscSF sf, DM odm)
{
  Vec A;

  PetscFunctionBegin;
  /* TODO handle regions? */
  PetscCall(DMGetAuxiliaryVec(dm, NULL, 0, 0, &A));
  if (A) {
    DM           dmAux, ocdm, odmAux;
    Vec          oA, oAt;
    PetscSection sec, osec;

    PetscCall(VecGetDM(A, &dmAux));
    PetscCall(DMClone(odm, &odmAux));
    PetscCall(DMGetCoordinateDM(odm, &ocdm));
    PetscCall(DMSetCoordinateDM(odmAux, ocdm));
    PetscCall(DMCopyDisc(dmAux, odmAux));

    PetscCall(DMGetLocalSection(dmAux, &sec));
    if (sf) {
      PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)sec), &osec));
      PetscCall(VecCreate(PetscObjectComm((PetscObject)A), &oAt));
      PetscCall(DMPlexDistributeField(dmAux, sf, sec, A, osec, oAt));
    } else {
      PetscCall(PetscObjectReference((PetscObject)sec));
      osec = sec;
      PetscCall(PetscObjectReference((PetscObject)A));
      oAt = A;
    }
    PetscCall(DMSetLocalSection(odmAux, osec));
    PetscCall(PetscSectionDestroy(&osec));
    PetscCall(DMCreateLocalVector(odmAux, &oA));
    PetscCall(DMDestroy(&odmAux));
    PetscCall(VecCopy(oAt, oA));
    PetscCall(VecDestroy(&oAt));
    PetscCall(DMSetAuxiliaryVec(odm, NULL, 0, 0, oA));
    PetscCall(VecDestroy(&oA));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
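
/*
  DMCreateDomainDecomposition_Plex - Create a one-subdomain-per-process overlapping
  decomposition, returning the subdomain DM and the index sets that relate subdomain
  dofs to the global dofs of dm.

  A minimal usage sketch (hypothetical caller code; assumes dm is a parallel DMPLEX
  with a discretization attached, and goes through the generic
  DMCreateDomainDecomposition() entry point rather than calling this function directly):

    PetscInt nsub;
    IS      *inner, *outer;
    DM      *subdms;

    PetscCall(DMCreateDomainDecomposition(dm, &nsub, NULL, &inner, &outer, &subdms));
    // ... use subdms[0] and the index sets, e.g. through the scatters below ...
    PetscCall(ISDestroy(&inner[0]));
    PetscCall(ISDestroy(&outer[0]));
    PetscCall(DMDestroy(&subdms[0]));
    PetscCall(PetscFree(inner));
    PetscCall(PetscFree(outer));
    PetscCall(PetscFree(subdms));
*/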
PetscErrorCode DMCreateDomainDecomposition_Plex(DM dm, PetscInt *nsub, char ***names, IS **innerises, IS **outerises, DM **dms)
{
  DM           odm;
  PetscSF      migrationSF = NULL, sectionSF;
  PetscSection sec, tsec, ogsec, olsec;
  PetscInt     n, mh, ddovl = 0, pStart, pEnd, ni, no, nl;
  PetscDS      ds;
  DMLabel      label;
  const char  *oname = "__internal_plex_dd_ovl_";
  IS           gi_is, li_is, go_is, gl_is, ll_is;
  IS           gis, lis;
  PetscInt     rst, ren, c, *gidxs, *lidxs, *tidxs;
  Vec          gvec;

  PetscFunctionBegin;
  n = 1;
  if (nsub) *nsub = n;
  if (names) PetscCall(PetscCalloc1(n, names));
  if (innerises) PetscCall(PetscCalloc1(n, innerises));
  if (outerises) PetscCall(PetscCalloc1(n, outerises));
  if (dms) PetscCall(PetscCalloc1(n, dms));

  PetscObjectOptionsBegin((PetscObject)dm);
  PetscCall(PetscOptionsBoundedInt("-dm_plex_dd_overlap", "The size of the overlap halo for the subdomains", "DMCreateDomainDecomposition", ddovl, &ddovl, NULL, 0));
  PetscOptionsEnd();

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_plex_dd_dm_view"));
  PetscCall(DMPlexDistributeOverlap_Internal(dm, ddovl + 1, PETSC_COMM_SELF, oname, &migrationSF, &odm));
  if (!odm) PetscCall(DMClone(dm, &odm));
  if (migrationSF) PetscCall(PetscSFViewFromOptions(migrationSF, (PetscObject)dm, "-dm_plex_dd_sf_view"));

  PetscCall(DMPlexGetMaxProjectionHeight(dm, &mh));
  PetscCall(DMPlexSetMaxProjectionHeight(odm, mh));

  /* share discretization */
  /* TODO Labels for regions may need to be updated:
     for now we use the original ones, not the ones for the odm.
     Not sure what to do here */
  /* PetscCall(DMCopyDisc(dm, odm)); */
  PetscCall(DMGetDS(odm, &ds));
  if (!ds) {
    PetscCall(DMGetLocalSection(dm, &sec));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)odm), &tsec));
    if (migrationSF) {
      PetscCall(PetscSFDistributeSection(migrationSF, sec, NULL, tsec));
    } else {
      PetscCall(PetscSectionCopy(sec, tsec));
    }
    PetscCall(DMSetLocalSection(odm, tsec));
    PetscCall(PetscSectionDestroy(&tsec));
  }
  /* TODO: what if these values change? add to some DM hook? */
  PetscCall(DMTransferMaterialParameters(dm, migrationSF, odm));

  PetscCall(DMViewFromOptions(odm, (PetscObject)dm, "-dm_plex_dd_overlap_dm_view"));
#if 0
  {
    DM              seqdm;
    Vec             val;
    IS              is;
    PetscInt        vStart, vEnd;
    const PetscInt *vnum;
    char            name[256];
    PetscViewer     viewer;

    PetscCall(DMPlexDistributeOverlap_Internal(dm, 0, PETSC_COMM_SELF, "local_mesh", NULL, &seqdm));
    PetscCall(PetscSNPrintf(name, sizeof(name), "local_mesh_%d.vtu", PetscGlobalRank));
    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)seqdm), name, FILE_MODE_WRITE, &viewer));
    PetscCall(DMGetLabel(seqdm, "local_mesh", &label));
    PetscCall(DMPlexLabelComplete(seqdm, label));
    PetscCall(DMPlexCreateLabelField(seqdm, label, &val));
    PetscCall(VecView(val, viewer));
    PetscCall(VecDestroy(&val));
    PetscCall(PetscViewerDestroy(&viewer));

    PetscCall(PetscSNPrintf(name, sizeof(name), "asm_vertices_%d.vtu", PetscGlobalRank));
    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)seqdm), name, FILE_MODE_WRITE, &viewer));
    PetscCall(DMCreateLabel(seqdm, "asm_vertices"));
    PetscCall(DMGetLabel(seqdm, "asm_vertices", &label));
    PetscCall(DMPlexGetVertexNumbering(dm, &is));
    PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
    PetscCall(ISGetIndices(is, &vnum));
    for (PetscInt v = 0; v < vEnd - vStart; v++) {
      if (vnum[v] < 0) continue;
      PetscCall(DMLabelSetValue(label, v + vStart, 1));
    }
    PetscCall(DMPlexCreateLabelField(seqdm, label, &val));
    PetscCall(VecView(val, viewer));
    PetscCall(VecDestroy(&val));
    PetscCall(ISRestoreIndices(is, &vnum));
    PetscCall(PetscViewerDestroy(&viewer));

    PetscCall(DMDestroy(&seqdm));
    PetscCall(PetscSNPrintf(name, sizeof(name), "ovl_mesh_%d.vtu", PetscGlobalRank));
    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)odm), name, FILE_MODE_WRITE, &viewer));
    PetscCall(DMView(odm, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
  }
#endif

  /* propagate original global ordering to overlapping DM */
  PetscCall(DMGetSectionSF(dm, &sectionSF));
  PetscCall(DMGetLocalSection(dm, &sec));
  PetscCall(PetscSectionGetStorageSize(sec, &nl));
  PetscCall(DMGetGlobalVector(dm, &gvec));
  PetscCall(VecGetOwnershipRange(gvec, &rst, &ren));
  PetscCall(DMRestoreGlobalVector(dm, &gvec));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, ren - rst, rst, 1, &gi_is)); /* non-overlapping dofs */
  PetscCall(PetscMalloc1(nl, &lidxs));
  for (PetscInt i = 0; i < nl; i++) lidxs[i] = -1;
  PetscCall(ISGetIndices(gi_is, (const PetscInt **)&gidxs));
  PetscCall(PetscSFBcastBegin(sectionSF, MPIU_INT, gidxs, lidxs, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sectionSF, MPIU_INT, gidxs, lidxs, MPI_REPLACE));
  PetscCall(ISRestoreIndices(gi_is, (const PetscInt **)&gidxs));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), nl, lidxs, PETSC_OWN_POINTER, &lis));
  if (migrationSF) {
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)odm), &tsec));
    PetscCall(DMPlexDistributeFieldIS(dm, migrationSF, sec, lis, tsec, &gis));
  } else {
    PetscCall(PetscObjectReference((PetscObject)lis));
    gis  = lis;
    tsec = NULL;
  }
  PetscCall(PetscSectionDestroy(&tsec));
  PetscCall(ISDestroy(&lis));
  PetscCall(PetscSFDestroy(&migrationSF));
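
  /* At this point gis maps each local dof of odm to its global number in the
     original (non-overlapping) ordering of dm, or -1 for dofs without one */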

  /* make dofs on the overlap boundary (not the global boundary) constrained */
  PetscCall(DMGetLabel(odm, oname, &label));
  if (label) {
    PetscCall(DMPlexLabelComplete(odm, label));
    PetscCall(DMGetLocalSection(odm, &tsec));
    PetscCall(PetscSectionGetChart(tsec, &pStart, &pEnd));
    PetscCall(DMLabelCreateIndex(label, pStart, pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)tsec), &sec));
    PetscCall(PetscSectionCopy_Internal(tsec, sec, label->bt));
    PetscCall(DMSetLocalSection(odm, sec));
    PetscCall(PetscSectionDestroy(&sec));
    PetscCall(DMRemoveLabel(odm, oname, NULL));
  } else { /* sequential case */
    PetscCall(DMGetLocalSection(dm, &sec));
    PetscCall(DMSetLocalSection(odm, sec));
  }

  /* Create index sets for dofs in the overlap dm */
  PetscCall(DMGetSectionSF(odm, &sectionSF));
  PetscCall(DMGetLocalSection(odm, &olsec));
  PetscCall(DMGetGlobalSection(odm, &ogsec));
  PetscCall(PetscSectionViewFromOptions(ogsec, (PetscObject)dm, "-dm_plex_dd_overlap_gsection_view"));
  PetscCall(PetscSectionViewFromOptions(olsec, (PetscObject)dm, "-dm_plex_dd_overlap_lsection_view"));
  ni = ren - rst;
  PetscCall(PetscSectionGetConstrainedStorageSize(ogsec, &no)); /* dofs in the overlap */
  PetscCall(PetscSectionGetStorageSize(olsec, &nl)); /* local dofs in the overlap */
  PetscCall(PetscMalloc1(no, &gidxs));
  PetscCall(ISGetIndices(gis, (const PetscInt **)&lidxs));
  PetscCall(PetscSFReduceBegin(sectionSF, MPIU_INT, lidxs, gidxs, MPI_REPLACE));
  PetscCall(PetscSFReduceEnd(sectionSF, MPIU_INT, lidxs, gidxs, MPI_REPLACE));
  PetscCall(ISRestoreIndices(gis, (const PetscInt **)&lidxs));

  /* non-overlapping dofs */
  PetscCall(PetscMalloc1(no, &lidxs));
  c = 0;
  for (PetscInt i = 0; i < no; i++) {
    if (gidxs[i] >= rst && gidxs[i] < ren) lidxs[c++] = i;
  }
  PetscCheck(c == ni, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT " != %" PetscInt_FMT, c, ni);
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), ni, lidxs, PETSC_OWN_POINTER, &li_is));

  /* global dofs in the overlap */
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), no, gidxs, PETSC_OWN_POINTER, &go_is));
  PetscCall(ISViewFromOptions(go_is, (PetscObject)dm, "-dm_plex_dd_overlap_gois_view"));
  /* PetscCall(ISCreateStride(PetscObjectComm((PetscObject)odm), no, 0, 1, &lo_is)); */

  /* local dofs of the overlapping subdomain (we actually need only dofs on the boundary of the subdomain) */
  PetscCall(PetscMalloc1(nl, &lidxs));
  PetscCall(PetscMalloc1(nl, &gidxs));
  PetscCall(ISGetIndices(gis, (const PetscInt **)&tidxs));
  c = 0;
  for (PetscInt i = 0; i < nl; i++) {
    if (tidxs[i] < 0) continue;
    lidxs[c] = i;
    gidxs[c] = tidxs[i];
    c++;
  }
  PetscCall(ISRestoreIndices(gis, (const PetscInt **)&tidxs));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), c, gidxs, PETSC_OWN_POINTER, &gl_is));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), c, lidxs, PETSC_OWN_POINTER, &ll_is));
  PetscCall(ISViewFromOptions(gl_is, (PetscObject)dm, "-dm_plex_dd_overlap_glis_view"));
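
  /* stash the index sets on the subdomain DM so that
     DMCreateDomainDecompositionScatters_Plex() can retrieve them:
       gi_is: owned (non-overlapping) dofs in the global ordering of dm
       li_is: positions of those owned dofs within the subdomain
       go_is: all subdomain dofs in the global ordering of dm
       gl_is, ll_is: global/local positions of the subdomain's local dofs */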
  PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_gi", (PetscObject)gi_is));
  PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_li", (PetscObject)li_is));
  PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_go", (PetscObject)go_is));
  PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_gl", (PetscObject)gl_is));
  PetscCall(PetscObjectCompose((PetscObject)odm, "__Plex_DD_IS_ll", (PetscObject)ll_is));

  if (innerises) (*innerises)[0] = gi_is;
  else PetscCall(ISDestroy(&gi_is));
  if (outerises) (*outerises)[0] = go_is;
  else PetscCall(ISDestroy(&go_is));
  if (dms) (*dms)[0] = odm;
  else PetscCall(DMDestroy(&odm));
  PetscCall(ISDestroy(&li_is));
  PetscCall(ISDestroy(&gl_is));
  PetscCall(ISDestroy(&ll_is));
  PetscCall(ISDestroy(&gis));
  PetscFunctionReturn(PETSC_SUCCESS);
}
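
/*
  DMCreateDomainDecompositionScatters_Plex - Create the scatters from a global vector of dm
  to the subdomain vectors: iscat covers the owned (non-overlapping) dofs, oscat all dofs of
  the overlapping subdomain, and lscat the subdomain's local vector, including the dofs
  constrained on the overlap boundary.

  A minimal usage sketch (hypothetical caller code; goes through the generic
  DMCreateDomainDecompositionScatters() entry point and assumes nsub and subdms come from
  DMCreateDomainDecomposition() above):

    VecScatter *iscat, *oscat, *lscat;

    PetscCall(DMCreateDomainDecompositionScatters(dm, nsub, subdms, &iscat, &oscat, &lscat));
    PetscCall(VecScatterBegin(oscat[0], x, xsub, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(oscat[0], x, xsub, INSERT_VALUES, SCATTER_FORWARD));
*/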
PetscErrorCode DMCreateDomainDecompositionScatters_Plex(DM dm, PetscInt n, DM *subdms, VecScatter **iscat, VecScatter **oscat, VecScatter **lscat)
{
  Vec gvec, svec, lvec;
  IS  gi_is, li_is, go_is, gl_is, ll_is;

  PetscFunctionBegin;
  if (iscat) PetscCall(PetscMalloc1(n, iscat));
  if (oscat) PetscCall(PetscMalloc1(n, oscat));
  if (lscat) PetscCall(PetscMalloc1(n, lscat));

  PetscCall(DMGetGlobalVector(dm, &gvec));
  for (PetscInt i = 0; i < n; i++) {
    PetscCall(DMGetGlobalVector(subdms[i], &svec));
    PetscCall(DMGetLocalVector(subdms[i], &lvec));
    PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_gi", (PetscObject *)&gi_is));
    PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_li", (PetscObject *)&li_is));
    PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_go", (PetscObject *)&go_is));
    PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_gl", (PetscObject *)&gl_is));
    PetscCall(PetscObjectQuery((PetscObject)subdms[i], "__Plex_DD_IS_ll", (PetscObject *)&ll_is));
    PetscCheck(gi_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
    PetscCheck(li_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
    PetscCheck(go_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
    PetscCheck(gl_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
    PetscCheck(ll_is, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "SubDM not obtained from DMCreateDomainDecomposition");
    if (iscat) PetscCall(VecScatterCreate(gvec, gi_is, svec, li_is, &(*iscat)[i]));
    if (oscat) PetscCall(VecScatterCreate(gvec, go_is, svec, NULL, &(*oscat)[i]));
    if (lscat) PetscCall(VecScatterCreate(gvec, gl_is, lvec, ll_is, &(*lscat)[i]));
    PetscCall(DMRestoreGlobalVector(subdms[i], &svec));
    PetscCall(DMRestoreLocalVector(subdms[i], &lvec));
  }
  PetscCall(DMRestoreGlobalVector(dm, &gvec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  DMCreateNeumannOverlap_Plex - Generates an IS, an unassembled (Neumann) Mat, a setup function, and the corresponding context to be used by PCHPDDM.

  Input Parameter:
. dm - the DM used for the decomposition

  Output Parameters:
+ ovl - index set of the overlapping subdomain
. J - unassembled (Neumann) local matrix
. setup - function for generating the matrix
- setup_ctx - context for setup

  Options Database Keys:
+ -dm_plex_view_neumann_original - view the DM without overlap
- -dm_plex_view_neumann_overlap - view the DM with overlap as needed by PCHPDDM

  Level: advanced

.seealso: `DMCreate()`, `DM`, `MATIS`, `PCHPDDM`, `PCHPDDMSetAuxiliaryMat()`
*/
PetscErrorCode DMCreateNeumannOverlap_Plex(DM dm, IS *ovl, Mat *J, PetscErrorCode (**setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void **setup_ctx)
{
  DM                     odm;
  Mat                    pJ;
  PetscSF                sf = NULL;
  PetscSection           sec, osec;
  ISLocalToGlobalMapping l2g;
  const PetscInt        *idxs;
  PetscInt               n, mh;

  PetscFunctionBegin;
  *setup     = NULL;
  *setup_ctx = NULL;
  *ovl       = NULL;
  *J         = NULL;

  /* Overlapped mesh
     the overlap is a little more generous than strictly needed, since it is not computed
     starting from the owned (Dirichlet) points but from the locally owned cells */
  PetscCall(DMPlexDistributeOverlap(dm, 1, &sf, &odm));
  if (!odm) {
    PetscCall(PetscSFDestroy(&sf));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* share discretization */
  PetscCall(DMGetLocalSection(dm, &sec));
  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)sec), &osec));
  PetscCall(PetscSFDistributeSection(sf, sec, NULL, osec));
  /* what to do here? using both is fine? */
  PetscCall(DMSetLocalSection(odm, osec));
  PetscCall(DMCopyDisc(dm, odm));
  PetscCall(DMPlexGetMaxProjectionHeight(dm, &mh));
  PetscCall(DMPlexSetMaxProjectionHeight(odm, mh));
  PetscCall(PetscSectionDestroy(&osec));

  /* material parameters */
  /* TODO: what if these values change? add to some DM hook? */
  PetscCall(DMTransferMaterialParameters(dm, sf, odm));
  PetscCall(PetscSFDestroy(&sf));

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_plex_view_neumann_original"));
  PetscCall(PetscObjectSetName((PetscObject)odm, "OVL"));
  PetscCall(DMViewFromOptions(odm, NULL, "-dm_plex_view_neumann_overlap"));

  /* MATIS for the overlap region
     the HPDDM interface wants local matrices,
     we attach the global MATIS to the overlap IS,
     since we need it to do assembly */
  PetscCall(DMSetMatType(odm, MATIS));
  PetscCall(DMCreateMatrix(odm, &pJ));
  PetscCall(MatISGetLocalMat(pJ, J));
  PetscCall(PetscObjectReference((PetscObject)*J));

  /* overlap IS */
  PetscCall(MatISGetLocalToGlobalMapping(pJ, &l2g, NULL));
  PetscCall(ISLocalToGlobalMappingGetSize(l2g, &n));
  PetscCall(ISLocalToGlobalMappingGetIndices(l2g, &idxs));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)odm), n, idxs, PETSC_COPY_VALUES, ovl));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(l2g, &idxs));
  PetscCall(PetscObjectCompose((PetscObject)*ovl, "_DM_Overlap_HPDDM_MATIS", (PetscObject)pJ));
  PetscCall(DMDestroy(&odm));
  PetscCall(MatDestroy(&pJ));

  /* special purpose setup function (composed in DMPlexSetSNESLocalFEM) */
  PetscCall(PetscObjectQueryFunction((PetscObject)dm, "MatComputeNeumannOverlap_C", setup));
  if (*setup) PetscCall(PetscObjectCompose((PetscObject)*ovl, "_DM_Original_HPDDM", (PetscObject)dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
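
/* A minimal sketch of how this entry point is typically consumed (an assumption for
   illustration: PCHPDDM reaches it through a composed function during PCSetUp(),
   rather than users calling it directly):

     PetscErrorCode (*setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *) = NULL;
     void *ctx = NULL;
     IS    ovl = NULL;
     Mat   J   = NULL;

     PetscCall(DMCreateNeumannOverlap_Plex(dm, &ovl, &J, &setup, &ctx));
     if (ovl) PetscCall(PCHPDDMSetAuxiliaryMat(pc, ovl, J, setup, ctx));
*/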