Actual source code: gasm.c
1: /*
2: This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3: In this version each MPI rank may intersect multiple subdomains and any subdomain may
4: intersect multiple MPI ranks. Intersections of subdomains with MPI ranks are called *local
5: subdomains*.
7: N - total number of distinct global subdomains (set explicitly in PCGASMSetTotalSubdomains() or implicitly in PCGASMSetSubdomains() and then calculated in PCSetUp_GASM())
8: n - actual number of local subdomains on this rank (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9: nmax - maximum number of local subdomains per rank (calculated in PCSetUp_GASM())
10: */
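/*
   For orientation, a minimal sketch of how these quantities are typically driven from user
   code (the outer KSP `ksp` and the matrix `A` are placeholders, not defined in this file):

     PC pc;
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCGASM));
     PetscCall(PCGASMSetTotalSubdomains(pc, 8));    sets N = 8; n and nmax are then determined in PCSetUp_GASM()
     PetscCall(KSPSetFromOptions(ksp));
*/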
11: #include <petsc/private/pcimpl.h>
12: #include <petscdm.h>
14: typedef struct {
15: PetscInt N, n, nmax;
16: PetscInt overlap; /* overlap requested by user */
17: PCGASMType type; /* use reduced interpolation, restriction or both */
18: PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */
19: PetscBool same_subdomain_solvers; /* flag indicating whether all local solvers are same */
20: PetscBool sort_indices; /* flag to sort subdomain indices */
21: PetscBool user_subdomains; /* whether the user set explicit subdomain index sets -- keep them on PCReset() */
22: PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */
23: PetscBool hierarchicalpartitioning; /* whether hierarchical partitioning is used to define the subdomains */
24: IS *ois; /* index sets that define the outer (conceptually, overlapping) subdomains */
25: IS *iis; /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
26: KSP *ksp; /* linear solvers for each subdomain */
27: Mat *pmat; /* subdomain block matrices */
28: Vec gx, gy; /* Merged work vectors */
29: Vec *x, *y; /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
30: VecScatter gorestriction; /* merged restriction to disjoint union of outer subdomains */
31: VecScatter girestriction; /* merged restriction to disjoint union of inner subdomains */
32: VecScatter pctoouter; /* scatter between the permuted work vectors (pcx, pcy) and vectors in the original layout */
33: IS permutationIS; /* row permutation induced by the hierarchical partitioning */
34: Mat permutationP; /* permuted preconditioning matrix */
35: Mat pcmat; /* holds the original pc->pmat while the permuted matrix is in use */
36: Vec pcx, pcy; /* work vectors in the permuted layout */
37: } PC_GASM;
39: static PetscErrorCode PCGASMComputeGlobalSubdomainNumbering_Private(PC pc, PetscInt **numbering, PetscInt **permutation)
40: {
41: PC_GASM *osm = (PC_GASM *)pc->data;
42: PetscInt i;
44: PetscFunctionBegin;
45: /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */
46: PetscCall(PetscMalloc2(osm->n, numbering, osm->n, permutation));
47: PetscCall(PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject *)osm->iis, NULL, *numbering));
48: for (i = 0; i < osm->n; ++i) (*permutation)[i] = i;
49: PetscCall(PetscSortIntWithPermutation(osm->n, *numbering, *permutation));
50: PetscFunctionReturn(PETSC_SUCCESS);
51: }
53: static PetscErrorCode PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
54: {
55: PC_GASM *osm = (PC_GASM *)pc->data;
56: PetscInt j, nidx;
57: const PetscInt *idx;
58: PetscViewer sviewer;
59: char *cidx;
61: PetscFunctionBegin;
62: PetscCheck(i >= 0 && i < osm->n, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %" PetscInt_FMT ": must be nonnegative and less than %" PetscInt_FMT, i, osm->n);
63: /* Inner subdomains. */
64: PetscCall(ISGetLocalSize(osm->iis[i], &nidx));
65: /*
66: No more than 15 characters per index plus a space.
67: PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
68: in case nidx == 0. That will take care of the space for the trailing '\0' as well.
69: For nidx == 0, the whole string is 16 '\0' characters.
70: */
71: #define len 16 * (nidx + 1) + 1
72: PetscCall(PetscMalloc1(len, &cidx));
73: PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, cidx, len, &sviewer));
74: #undef len
75: PetscCall(ISGetIndices(osm->iis[i], &idx));
76: for (j = 0; j < nidx; ++j) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
77: PetscCall(ISRestoreIndices(osm->iis[i], &idx));
78: PetscCall(PetscViewerDestroy(&sviewer));
79: PetscCall(PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n"));
80: PetscCall(PetscViewerFlush(viewer));
81: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
82: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx));
83: PetscCall(PetscViewerFlush(viewer));
84: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
85: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
86: PetscCall(PetscViewerFlush(viewer));
87: PetscCall(PetscFree(cidx));
88: /* Outer subdomains. */
89: PetscCall(ISGetLocalSize(osm->ois[i], &nidx));
90: /*
91: No more than 15 characters per index plus a space.
92: PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
93: in case nidx == 0. That will take care of the space for the trailing '\0' as well.
94: For nidx == 0, the whole string is 16 '\0' characters.
95: */
96: #define len 16 * (nidx + 1) + 1
97: PetscCall(PetscMalloc1(len, &cidx));
98: PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, cidx, len, &sviewer));
99: #undef len
100: PetscCall(ISGetIndices(osm->ois[i], &idx));
101: for (j = 0; j < nidx; ++j) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
102: PetscCall(PetscViewerDestroy(&sviewer));
103: PetscCall(ISRestoreIndices(osm->ois[i], &idx));
104: PetscCall(PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n"));
105: PetscCall(PetscViewerFlush(viewer));
106: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
107: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx));
108: PetscCall(PetscViewerFlush(viewer));
109: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
110: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
111: PetscCall(PetscViewerFlush(viewer));
112: PetscCall(PetscFree(cidx));
113: PetscFunctionReturn(PETSC_SUCCESS);
114: }
116: static PetscErrorCode PCGASMPrintSubdomains(PC pc)
117: {
118: PC_GASM *osm = (PC_GASM *)pc->data;
119: const char *prefix;
120: char fname[PETSC_MAX_PATH_LEN + 1];
121: PetscInt l, d, count;
122: PetscBool found;
123: PetscViewer viewer, sviewer = NULL;
124: PetscInt *numbering, *permutation; /* global numbering of locally-supported subdomains and the permutation from the local ordering */
126: PetscFunctionBegin;
127: PetscCall(PCGetOptionsPrefix(pc, &prefix));
128: PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_gasm_print_subdomains", &found));
129: if (!found) PetscFunctionReturn(PETSC_SUCCESS);
130: PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_gasm_print_subdomains", fname, sizeof(fname), &found));
131: if (!found) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
132: PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
133: /*
134: Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
135: the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
136: */
137: PetscCall(PetscObjectName((PetscObject)viewer));
138: l = 0;
139: PetscCall(PCGASMComputeGlobalSubdomainNumbering_Private(pc, &numbering, &permutation));
140: for (count = 0; count < osm->N; ++count) {
141: /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
142: if (l < osm->n) {
143: d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
144: if (numbering[d] == count) {
145: PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)osm->ois[d])->comm, &sviewer));
146: PetscCall(PCGASMSubdomainView_Private(pc, d, sviewer));
147: PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)osm->ois[d])->comm, &sviewer));
148: ++l;
149: }
150: }
151: PetscCallMPI(MPI_Barrier(PetscObjectComm((PetscObject)pc)));
152: }
153: PetscCall(PetscFree2(numbering, permutation));
154: PetscCall(PetscViewerDestroy(&viewer));
155: PetscFunctionReturn(PETSC_SUCCESS);
156: }
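/*
   The printing above is triggered purely from the options database, for example (a sketch;
   the executable name is a placeholder):

     ./app -pc_type gasm -pc_gasm_print_subdomains            prints to stdout
     ./app -pc_type gasm -pc_gasm_print_subdomains sub.txt    prints to the file sub.txt
*/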
158: static PetscErrorCode PCView_GASM(PC pc, PetscViewer viewer)
159: {
160: PC_GASM *osm = (PC_GASM *)pc->data;
161: const char *prefix;
162: PetscMPIInt rank, size;
163: PetscInt bsz;
164: PetscBool iascii, view_subdomains = PETSC_FALSE;
165: PetscViewer sviewer;
166: PetscInt count, l;
167: char overlap[256] = "user-defined overlap";
168: char gsubdomains[256] = "unknown total number of subdomains";
169: char msubdomains[256] = "unknown max number of local subdomains";
170: PetscInt *numbering, *permutation; /* global numbering of locally-supported subdomains and the permutation from the local ordering */
172: PetscFunctionBegin;
173: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
174: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
176: if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlap, sizeof(overlap), "requested amount of overlap = %" PetscInt_FMT, osm->overlap));
177: if (osm->N != PETSC_DETERMINE) PetscCall(PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %" PetscInt_FMT, osm->N));
178: if (osm->nmax != PETSC_DETERMINE) PetscCall(PetscSNPrintf(msubdomains, sizeof(msubdomains), "max number of local subdomains = %" PetscInt_FMT, osm->nmax));
180: PetscCall(PCGetOptionsPrefix(pc, &prefix));
181: PetscCall(PetscOptionsGetBool(NULL, prefix, "-pc_gasm_view_subdomains", &view_subdomains, NULL));
183: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
184: if (iascii) {
185: /*
186: Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
187: the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
188: collectively on the comm.
189: */
190: PetscCall(PetscObjectName((PetscObject)viewer));
191: PetscCall(PetscViewerASCIIPrintf(viewer, " Restriction/interpolation type: %s\n", PCGASMTypes[osm->type]));
192: PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", overlap));
193: PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", gsubdomains));
194: PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", msubdomains));
195: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
196: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d|%d] number of locally-supported subdomains = %" PetscInt_FMT "\n", rank, size, osm->n));
197: PetscCall(PetscViewerFlush(viewer));
198: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
199: /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
200: PetscCall(PetscViewerASCIIPrintf(viewer, " Subdomain solver info is as follows:\n"));
201: PetscCall(PetscViewerASCIIPushTab(viewer));
202: PetscCall(PetscViewerASCIIPrintf(viewer, " - - - - - - - - - - - - - - - - - -\n"));
203: /* Make sure that everybody waits for the banner to be printed. */
204: PetscCallMPI(MPI_Barrier(PetscObjectComm((PetscObject)viewer)));
205: /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
206: PetscCall(PCGASMComputeGlobalSubdomainNumbering_Private(pc, &numbering, &permutation));
207: l = 0;
208: for (count = 0; count < osm->N; ++count) {
209: PetscMPIInt srank, ssize;
210: if (l < osm->n) {
211: PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
212: if (numbering[d] == count) {
213: PetscCallMPI(MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize));
214: PetscCallMPI(MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank));
215: PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)osm->ois[d])->comm, &sviewer));
216: PetscCall(ISGetLocalSize(osm->ois[d], &bsz));
217: PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, " [%d|%d] (subcomm [%d|%d]) local subdomain number %" PetscInt_FMT ", local size = %" PetscInt_FMT "\n", rank, size, srank, ssize, d, bsz));
218: PetscCall(PetscViewerFlush(sviewer));
219: if (view_subdomains) PetscCall(PCGASMSubdomainView_Private(pc, d, sviewer));
220: if (!pc->setupcalled) {
221: PetscCall(PetscViewerASCIIPrintf(sviewer, " Solver not set up yet: PCSetUp() not yet called\n"));
222: } else {
223: PetscCall(KSPView(osm->ksp[d], sviewer));
224: }
225: PetscCall(PetscViewerASCIIPrintf(sviewer, " - - - - - - - - - - - - - - - - - -\n"));
226: PetscCall(PetscViewerFlush(sviewer));
227: PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)osm->ois[d])->comm, &sviewer));
228: ++l;
229: }
230: }
231: PetscCallMPI(MPI_Barrier(PetscObjectComm((PetscObject)pc)));
232: }
233: PetscCall(PetscFree2(numbering, permutation));
234: PetscCall(PetscViewerASCIIPopTab(viewer));
235: PetscCall(PetscViewerFlush(viewer));
236: /* this line is needed to match the extra PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
237: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
238: }
239: PetscFunctionReturn(PETSC_SUCCESS);
240: }
242: PETSC_INTERN PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[]);
244: static PetscErrorCode PCGASMSetHierarchicalPartitioning(PC pc)
245: {
246: PC_GASM *osm = (PC_GASM *)pc->data;
247: MatPartitioning part;
248: MPI_Comm comm;
249: PetscMPIInt size;
250: PetscInt nlocalsubdomains, fromrows_localsize;
251: IS partitioning, fromrows, isn;
252: Vec outervec;
254: PetscFunctionBegin;
255: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
256: PetscCallMPI(MPI_Comm_size(comm, &size));
257: /* We do not need hierarchical partitioning when
258: * the total number of subdomains is consistent with
259: * the number of MPI ranks.
260: * In the following cases hierarchical partitioning is skipped.
261: */
262: if (osm->N == PETSC_DETERMINE || osm->N >= size || osm->N == 1) PetscFunctionReturn(PETSC_SUCCESS);
263: PetscCheck(size % osm->N == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "The total number of subdomains %" PetscInt_FMT " must be a factor of the number of MPI ranks %d", osm->N, size);
264: nlocalsubdomains = size / osm->N;
265: osm->n = 1;
266: PetscCall(MatPartitioningCreate(comm, &part));
267: PetscCall(MatPartitioningSetAdjacency(part, pc->pmat));
268: PetscCall(MatPartitioningSetType(part, MATPARTITIONINGHIERARCH));
269: PetscCall(MatPartitioningHierarchicalSetNcoarseparts(part, osm->N));
270: PetscCall(MatPartitioningHierarchicalSetNfineparts(part, nlocalsubdomains));
271: PetscCall(MatPartitioningSetFromOptions(part));
272: /* get the new owning rank of each vertex */
273: PetscCall(MatPartitioningApply(part, &partitioning));
274: PetscCall(ISBuildTwoSided(partitioning, NULL, &fromrows));
275: PetscCall(ISPartitioningToNumbering(partitioning, &isn));
276: PetscCall(ISDestroy(&isn));
277: PetscCall(ISGetLocalSize(fromrows, &fromrows_localsize));
278: PetscCall(MatPartitioningDestroy(&part));
279: PetscCall(MatCreateVecs(pc->pmat, &outervec, NULL));
280: PetscCall(VecCreateMPI(comm, fromrows_localsize, PETSC_DETERMINE, &(osm->pcx)));
281: PetscCall(VecDuplicate(osm->pcx, &(osm->pcy)));
282: PetscCall(VecScatterCreate(osm->pcx, NULL, outervec, fromrows, &(osm->pctoouter)));
283: PetscCall(MatCreateSubMatrix(pc->pmat, fromrows, fromrows, MAT_INITIAL_MATRIX, &(osm->permutationP)));
284: PetscCall(PetscObjectReference((PetscObject)fromrows));
285: osm->permutationIS = fromrows;
286: osm->pcmat = pc->pmat;
287: PetscCall(PetscObjectReference((PetscObject)osm->permutationP));
288: pc->pmat = osm->permutationP;
289: PetscCall(VecDestroy(&outervec));
290: PetscCall(ISDestroy(&fromrows));
291: PetscCall(ISDestroy(&partitioning));
292: osm->n = PETSC_DETERMINE;
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
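/*
   A sketch of how the hierarchical partitioning path above is typically exercised
   (rank and subdomain counts are examples; note that N must divide the number of ranks):

     mpiexec -n 16 ./app -pc_type gasm -pc_gasm_total_subdomains 4 -pc_gasm_use_hierachical_partitioning

   With 16 ranks and N = 4, each of the 4 subdomains is spread across 16/4 = 4 ranks.
*/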
296: static PetscErrorCode PCSetUp_GASM(PC pc)
297: {
298: PC_GASM *osm = (PC_GASM *)pc->data;
299: PetscInt i, nInnerIndices, nTotalInnerIndices;
300: PetscMPIInt rank, size;
301: MatReuse scall = MAT_REUSE_MATRIX;
302: KSP ksp;
303: PC subpc;
304: const char *prefix, *pprefix;
305: Vec x, y;
306: PetscInt oni; /* Number of indices in the i-th local outer subdomain. */
307: const PetscInt *oidxi; /* Indices of the i-th local outer subdomain. */
308: PetscInt on; /* Number of indices in the disjoint union of local outer subdomains. */
309: PetscInt *oidx; /* Indices in the disjoint union of local outer subdomains. */
310: IS gois; /* Disjoint union of the global indices of the outer subdomains. */
311: IS goid; /* Identity IS of the size of the disjoint union of outer subdomains. */
312: PetscScalar *gxarray, *gyarray;
313: PetscInt gostart; /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
314: PetscInt num_subdomains = 0;
315: DM *subdomain_dm = NULL;
316: char **subdomain_names = NULL;
317: PetscInt *numbering;
319: PetscFunctionBegin;
320: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
321: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
322: if (!pc->setupcalled) {
323: /* use a hierarchical partitioning */
324: if (osm->hierarchicalpartitioning) PetscCall(PCGASMSetHierarchicalPartitioning(pc));
325: if (osm->n == PETSC_DETERMINE) {
326: if (osm->N != PETSC_DETERMINE) {
327: /* No local subdomains given, but the desired number of total subdomains is known, so construct them accordingly. */
328: PetscCall(PCGASMCreateSubdomains(pc->pmat, osm->N, &osm->n, &osm->iis));
329: } else if (osm->dm_subdomains && pc->dm) {
330: /* try pc->dm next, if allowed */
331: PetscInt d;
332: IS *inner_subdomain_is, *outer_subdomain_is;
333: PetscCall(DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm));
334: if (num_subdomains) PetscCall(PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is));
335: for (d = 0; d < num_subdomains; ++d) {
336: if (inner_subdomain_is) PetscCall(ISDestroy(&inner_subdomain_is[d]));
337: if (outer_subdomain_is) PetscCall(ISDestroy(&outer_subdomain_is[d]));
338: }
339: PetscCall(PetscFree(inner_subdomain_is));
340: PetscCall(PetscFree(outer_subdomain_is));
341: } else {
342: /* still no subdomains; use one per rank */
343: osm->nmax = osm->n = 1;
344: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
345: osm->N = size;
346: PetscCall(PCGASMCreateLocalSubdomains(pc->pmat, osm->n, &osm->iis));
347: }
348: }
349: if (!osm->iis) {
350: /*
351: osm->n was set in PCGASMSetSubdomains(), but the actual subdomains have not been supplied.
352: We create the requisite number of local inner subdomains and then expand them into
353: outer subdomains, if necessary.
354: */
355: PetscCall(PCGASMCreateLocalSubdomains(pc->pmat, osm->n, &osm->iis));
356: }
357: if (!osm->ois) {
358: /*
359: Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
360: has been requested, copy the inner subdomains over so they can be modified.
361: */
362: PetscCall(PetscMalloc1(osm->n, &osm->ois));
363: for (i = 0; i < osm->n; ++i) {
364: if (osm->overlap > 0 && osm->N > 1) { /* With positive overlap, osm->iis[i] will be modified */
365: PetscCall(ISDuplicate(osm->iis[i], (osm->ois) + i));
366: PetscCall(ISCopy(osm->iis[i], osm->ois[i]));
367: } else {
368: PetscCall(PetscObjectReference((PetscObject)((osm->iis)[i])));
369: osm->ois[i] = osm->iis[i];
370: }
371: }
372: if (osm->overlap > 0 && osm->N > 1) {
373: /* Extend the "overlapping" regions by a number of steps */
374: PetscCall(MatIncreaseOverlapSplit(pc->pmat, osm->n, osm->ois, osm->overlap));
375: }
376: }
378: /* Now the subdomains are defined. Determine their global and max local numbers, if necessary. */
379: if (osm->nmax == PETSC_DETERMINE) {
380: PetscMPIInt inwork, outwork;
381: /* determine global number of subdomains and the max number of local subdomains */
382: inwork = osm->n;
383: PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
384: osm->nmax = outwork;
385: }
386: if (osm->N == PETSC_DETERMINE) {
387: /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */
388: PetscCall(PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject *)osm->ois, &osm->N, NULL));
389: }
391: if (osm->sort_indices) {
392: for (i = 0; i < osm->n; i++) {
393: PetscCall(ISSort(osm->ois[i]));
394: PetscCall(ISSort(osm->iis[i]));
395: }
396: }
397: PetscCall(PCGetOptionsPrefix(pc, &prefix));
398: PetscCall(PCGASMPrintSubdomains(pc));
400: /*
401: Merge the ISs, create merged vectors and restrictions.
402: */
403: /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
404: on = 0;
405: for (i = 0; i < osm->n; i++) {
406: PetscCall(ISGetLocalSize(osm->ois[i], &oni));
407: on += oni;
408: }
409: PetscCall(PetscMalloc1(on, &oidx));
410: on = 0;
411: /* Merge local indices together */
412: for (i = 0; i < osm->n; i++) {
413: PetscCall(ISGetLocalSize(osm->ois[i], &oni));
414: PetscCall(ISGetIndices(osm->ois[i], &oidxi));
415: PetscCall(PetscArraycpy(oidx + on, oidxi, oni));
416: PetscCall(ISRestoreIndices(osm->ois[i], &oidxi));
417: on += oni;
418: }
419: PetscCall(ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois));
420: nTotalInnerIndices = 0;
421: for (i = 0; i < osm->n; i++) {
422: PetscCall(ISGetLocalSize(osm->iis[i], &nInnerIndices));
423: nTotalInnerIndices += nInnerIndices;
424: }
425: PetscCall(VecCreateMPI(((PetscObject)(pc))->comm, nTotalInnerIndices, PETSC_DETERMINE, &x));
426: PetscCall(VecDuplicate(x, &y));
428: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)pc), on, PETSC_DECIDE, &osm->gx));
429: PetscCall(VecDuplicate(osm->gx, &osm->gy));
430: PetscCall(VecGetOwnershipRange(osm->gx, &gostart, NULL));
431: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), on, gostart, 1, &goid));
432: /* gois may contain indices that are not locally owned */
433: PetscCall(VecScatterCreate(x, gois, osm->gx, goid, &(osm->gorestriction)));
434: PetscCall(PetscMalloc1(osm->n, &numbering));
435: PetscCall(PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject *)osm->ois, NULL, numbering));
436: PetscCall(VecDestroy(&x));
437: PetscCall(ISDestroy(&gois));
439: /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
440: {
441: PetscInt ini; /* Number of indices in the i-th local inner subdomain. */
442: PetscInt in; /* Number of indices in the disjoint union of local inner subdomains. */
443: PetscInt *iidx; /* Global indices in the merged local inner subdomain. */
444: PetscInt *ioidx; /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
445: IS giis; /* IS for the disjoint union of inner subdomains. */
446: IS giois; /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
447: PetscScalar *array;
448: const PetscInt *indices;
449: PetscInt k;
450: on = 0;
451: for (i = 0; i < osm->n; i++) {
452: PetscCall(ISGetLocalSize(osm->ois[i], &oni));
453: on += oni;
454: }
455: PetscCall(PetscMalloc1(on, &iidx));
456: PetscCall(PetscMalloc1(on, &ioidx));
457: PetscCall(VecGetArray(y, &array));
458: /* tag each inner-subdomain entry with its global subdomain number so the overlap can be identified after the scatter */
459: in = 0;
460: for (i = 0; i < osm->n; i++) {
461: PetscCall(ISGetLocalSize(osm->iis[i], &ini));
462: for (k = 0; k < ini; ++k) array[in + k] = numbering[i];
463: in += ini;
464: }
465: PetscCall(VecRestoreArray(y, &array));
466: PetscCall(VecScatterBegin(osm->gorestriction, y, osm->gy, INSERT_VALUES, SCATTER_FORWARD));
467: PetscCall(VecScatterEnd(osm->gorestriction, y, osm->gy, INSERT_VALUES, SCATTER_FORWARD));
468: PetscCall(VecGetOwnershipRange(osm->gy, &gostart, NULL));
469: PetscCall(VecGetArray(osm->gy, &array));
470: on = 0;
471: in = 0;
472: for (i = 0; i < osm->n; i++) {
473: PetscCall(ISGetLocalSize(osm->ois[i], &oni));
474: PetscCall(ISGetIndices(osm->ois[i], &indices));
475: for (k = 0; k < oni; k++) {
476: /* skip overlapping indices to get inner domain */
477: if (PetscRealPart(array[on + k]) != numbering[i]) continue;
478: iidx[in] = indices[k];
479: ioidx[in++] = gostart + on + k;
480: }
481: PetscCall(ISRestoreIndices(osm->ois[i], &indices));
482: on += oni;
483: }
484: PetscCall(VecRestoreArray(osm->gy, &array));
485: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, iidx, PETSC_OWN_POINTER, &giis));
486: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, ioidx, PETSC_OWN_POINTER, &giois));
487: PetscCall(VecScatterCreate(y, giis, osm->gy, giois, &osm->girestriction));
488: PetscCall(VecDestroy(&y));
489: PetscCall(ISDestroy(&giis));
490: PetscCall(ISDestroy(&giois));
491: }
492: PetscCall(ISDestroy(&goid));
493: PetscCall(PetscFree(numbering));
495: /* Create the subdomain work vectors. */
496: PetscCall(PetscMalloc1(osm->n, &osm->x));
497: PetscCall(PetscMalloc1(osm->n, &osm->y));
498: PetscCall(VecGetArray(osm->gx, &gxarray));
499: PetscCall(VecGetArray(osm->gy, &gyarray));
500: for (i = 0, on = 0; i < osm->n; ++i, on += oni) {
501: PetscInt oNi;
502: PetscCall(ISGetLocalSize(osm->ois[i], &oni));
503: /* on a sub communicator */
504: PetscCall(ISGetSize(osm->ois[i], &oNi));
505: PetscCall(VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm, 1, oni, oNi, gxarray + on, &osm->x[i]));
506: PetscCall(VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm, 1, oni, oNi, gyarray + on, &osm->y[i]));
507: }
508: PetscCall(VecRestoreArray(osm->gx, &gxarray));
509: PetscCall(VecRestoreArray(osm->gy, &gyarray));
510: /* Create the subdomain solvers */
511: PetscCall(PetscMalloc1(osm->n, &osm->ksp));
512: for (i = 0; i < osm->n; i++) {
513: char subprefix[PETSC_MAX_PATH_LEN + 1];
514: PetscCall(KSPCreate(((PetscObject)(osm->ois[i]))->comm, &ksp));
515: PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
516: PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
517: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
518: PetscCall(KSPSetType(ksp, KSPPREONLY));
519: PetscCall(KSPGetPC(ksp, &subpc)); /* Why do we need this here? */
520: if (subdomain_dm) {
521: PetscCall(KSPSetDM(ksp, subdomain_dm[i]));
522: PetscCall(DMDestroy(subdomain_dm + i));
523: }
524: PetscCall(PCGetOptionsPrefix(pc, &prefix));
525: PetscCall(KSPSetOptionsPrefix(ksp, prefix));
526: if (subdomain_names && subdomain_names[i]) {
527: PetscCall(PetscSNPrintf(subprefix, PETSC_MAX_PATH_LEN, "sub_%s_", subdomain_names[i]));
528: PetscCall(KSPAppendOptionsPrefix(ksp, subprefix));
529: PetscCall(PetscFree(subdomain_names[i]));
530: }
531: PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
532: osm->ksp[i] = ksp;
533: }
534: PetscCall(PetscFree(subdomain_dm));
535: PetscCall(PetscFree(subdomain_names));
536: scall = MAT_INITIAL_MATRIX;
537: } else { /* if (pc->setupcalled) */
538: /*
539: Destroy the submatrices from the previous iteration
540: */
541: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
542: PetscCall(MatDestroyMatrices(osm->n, &osm->pmat));
543: scall = MAT_INITIAL_MATRIX;
544: }
545: if (osm->permutationIS) {
546: PetscCall(MatCreateSubMatrix(pc->pmat, osm->permutationIS, osm->permutationIS, scall, &osm->permutationP));
547: PetscCall(PetscObjectReference((PetscObject)osm->permutationP));
548: osm->pcmat = pc->pmat;
549: pc->pmat = osm->permutationP;
550: }
551: }
553: /*
554: Extract the submatrices.
555: */
556: if (size > 1) {
557: PetscCall(MatCreateSubMatricesMPI(pc->pmat, osm->n, osm->ois, osm->ois, scall, &osm->pmat));
558: } else {
559: PetscCall(MatCreateSubMatrices(pc->pmat, osm->n, osm->ois, osm->ois, scall, &osm->pmat));
560: }
561: if (scall == MAT_INITIAL_MATRIX) {
562: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
563: for (i = 0; i < osm->n; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
564: }
566: /* Return control to the user so that the submatrices can be modified (e.g., to apply
567: different boundary conditions for the submatrices than for the global problem) */
568: PetscCall(PCModifySubMatrices(pc, osm->n, osm->ois, osm->ois, osm->pmat, pc->modifysubmatricesP));
570: /*
571: Loop over submatrices putting them into local ksps
572: */
573: for (i = 0; i < osm->n; i++) {
574: PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
575: PetscCall(KSPGetOptionsPrefix(osm->ksp[i], &prefix));
576: PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
577: if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
578: }
579: if (osm->pcmat) {
580: PetscCall(MatDestroy(&pc->pmat));
581: pc->pmat = osm->pcmat;
582: osm->pcmat = NULL;
583: }
584: PetscFunctionReturn(PETSC_SUCCESS);
585: }
587: static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
588: {
589: PC_GASM *osm = (PC_GASM *)pc->data;
590: PetscInt i;
592: PetscFunctionBegin;
593: for (i = 0; i < osm->n; i++) PetscCall(KSPSetUp(osm->ksp[i]));
594: PetscFunctionReturn(PETSC_SUCCESS);
595: }
597: static PetscErrorCode PCApply_GASM(PC pc, Vec xin, Vec yout)
598: {
599: PC_GASM *osm = (PC_GASM *)pc->data;
600: PetscInt i;
601: Vec x, y;
602: ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
604: PetscFunctionBegin;
605: if (osm->pctoouter) {
606: PetscCall(VecScatterBegin(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
607: PetscCall(VecScatterEnd(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
608: x = osm->pcx;
609: y = osm->pcy;
610: } else {
611: x = xin;
612: y = yout;
613: }
614: /*
615: support for limiting the restriction or interpolation only to the inner
616: subdomain values (leaving the other values 0).
617: */
618: if (!(osm->type & PC_GASM_RESTRICT)) {
619: /* have to zero the work RHS since scatter may leave some slots empty */
620: PetscCall(VecZeroEntries(osm->gx));
621: PetscCall(VecScatterBegin(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
622: } else {
623: PetscCall(VecScatterBegin(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
624: }
625: PetscCall(VecZeroEntries(osm->gy));
626: if (!(osm->type & PC_GASM_RESTRICT)) {
627: PetscCall(VecScatterEnd(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
628: } else {
629: PetscCall(VecScatterEnd(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
630: }
631: /* do the subdomain solves */
632: for (i = 0; i < osm->n; ++i) {
633: PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
634: PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
635: }
636: /* do we need to zero y? */
637: PetscCall(VecZeroEntries(y));
638: if (!(osm->type & PC_GASM_INTERPOLATE)) {
639: PetscCall(VecScatterBegin(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
640: PetscCall(VecScatterEnd(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
641: } else {
642: PetscCall(VecScatterBegin(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
643: PetscCall(VecScatterEnd(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
644: }
645: if (osm->pctoouter) {
646: PetscCall(VecScatterBegin(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
647: PetscCall(VecScatterEnd(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
648: }
649: PetscFunctionReturn(PETSC_SUCCESS);
650: }
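/*
   A sketch of the action of the routine above in matrix notation, for the default
   PC_GASM_RESTRICT type:

      y = sum_i  Rtilde_i^T  A_i^{-1}  R_i  x,      with  A_i = R_i A R_i^T,

   where R_i restricts to the i-th outer (overlapping) subdomain (gorestriction) and
   Rtilde_i restricts to the i-th inner (nonoverlapping) subdomain (girestriction);
   this is the restricted additive Schwarz update. For PC_GASM_BASIC both scatters use
   the outer subdomains, and for PC_GASM_NONE both use the inner ones.
*/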
652: static PetscErrorCode PCMatApply_GASM(PC pc, Mat Xin, Mat Yout)
653: {
654: PC_GASM *osm = (PC_GASM *)pc->data;
655: Mat X, Y, O = NULL, Z, W;
656: Vec x, y;
657: PetscInt i, m, M, N;
658: ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
660: PetscFunctionBegin;
661: PetscCheck(osm->n == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
662: PetscCall(MatGetSize(Xin, NULL, &N));
663: if (osm->pctoouter) {
664: PetscCall(VecGetLocalSize(osm->pcx, &m));
665: PetscCall(VecGetSize(osm->pcx, &M));
666: PetscCall(MatCreateDense(PetscObjectComm((PetscObject)osm->ois[0]), m, PETSC_DECIDE, M, N, NULL, &O));
667: for (i = 0; i < N; ++i) {
668: PetscCall(MatDenseGetColumnVecRead(Xin, i, &x));
669: PetscCall(MatDenseGetColumnVecWrite(O, i, &y));
670: PetscCall(VecScatterBegin(osm->pctoouter, x, y, INSERT_VALUES, SCATTER_REVERSE));
671: PetscCall(VecScatterEnd(osm->pctoouter, x, y, INSERT_VALUES, SCATTER_REVERSE));
672: PetscCall(MatDenseRestoreColumnVecWrite(O, i, &y));
673: PetscCall(MatDenseRestoreColumnVecRead(Xin, i, &x));
674: }
675: X = Y = O;
676: } else {
677: X = Xin;
678: Y = Yout;
679: }
680: /*
681: support for limiting the restriction or interpolation only to the inner
682: subdomain values (leaving the other values 0).
683: */
684: PetscCall(VecGetLocalSize(osm->x[0], &m));
685: PetscCall(VecGetSize(osm->x[0], &M));
686: PetscCall(MatCreateDense(PetscObjectComm((PetscObject)osm->ois[0]), m, PETSC_DECIDE, M, N, NULL, &Z));
687: for (i = 0; i < N; ++i) {
688: PetscCall(MatDenseGetColumnVecRead(X, i, &x));
689: PetscCall(MatDenseGetColumnVecWrite(Z, i, &y));
690: if (!(osm->type & PC_GASM_RESTRICT)) {
691: /* have to zero the work RHS since scatter may leave some slots empty */
692: PetscCall(VecZeroEntries(y));
693: PetscCall(VecScatterBegin(osm->girestriction, x, y, INSERT_VALUES, forward));
694: PetscCall(VecScatterEnd(osm->girestriction, x, y, INSERT_VALUES, forward));
695: } else {
696: PetscCall(VecScatterBegin(osm->gorestriction, x, y, INSERT_VALUES, forward));
697: PetscCall(VecScatterEnd(osm->gorestriction, x, y, INSERT_VALUES, forward));
698: }
699: PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &y));
700: PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));
701: }
702: PetscCall(MatCreateDense(PetscObjectComm((PetscObject)osm->ois[0]), m, PETSC_DECIDE, M, N, NULL, &W));
703: PetscCall(MatSetOption(Z, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
704: PetscCall(MatAssemblyBegin(Z, MAT_FINAL_ASSEMBLY));
705: PetscCall(MatAssemblyEnd(Z, MAT_FINAL_ASSEMBLY));
706: /* do the subdomain solve */
707: PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
708: PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
709: PetscCall(MatDestroy(&Z));
710: /* do we need to zero y? */
711: PetscCall(MatZeroEntries(Y));
712: for (i = 0; i < N; ++i) {
713: PetscCall(MatDenseGetColumnVecWrite(Y, i, &y));
714: PetscCall(MatDenseGetColumnVecRead(W, i, &x));
715: if (!(osm->type & PC_GASM_INTERPOLATE)) {
716: PetscCall(VecScatterBegin(osm->girestriction, x, y, ADD_VALUES, reverse));
717: PetscCall(VecScatterEnd(osm->girestriction, x, y, ADD_VALUES, reverse));
718: } else {
719: PetscCall(VecScatterBegin(osm->gorestriction, x, y, ADD_VALUES, reverse));
720: PetscCall(VecScatterEnd(osm->gorestriction, x, y, ADD_VALUES, reverse));
721: }
722: PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));
723: if (osm->pctoouter) {
724: PetscCall(MatDenseGetColumnVecWrite(Yout, i, &x));
725: PetscCall(VecScatterBegin(osm->pctoouter, y, x, INSERT_VALUES, SCATTER_FORWARD));
726: PetscCall(VecScatterEnd(osm->pctoouter, y, x, INSERT_VALUES, SCATTER_FORWARD));
727: PetscCall(MatDenseRestoreColumnVecRead(Yout, i, &x));
728: }
729: PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &y));
730: }
731: PetscCall(MatDestroy(&W));
732: PetscCall(MatDestroy(&O));
733: PetscFunctionReturn(PETSC_SUCCESS);
734: }
736: static PetscErrorCode PCApplyTranspose_GASM(PC pc, Vec xin, Vec yout)
737: {
738: PC_GASM *osm = (PC_GASM *)pc->data;
739: PetscInt i;
740: Vec x, y;
741: ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;
743: PetscFunctionBegin;
744: if (osm->pctoouter) {
745: PetscCall(VecScatterBegin(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
746: PetscCall(VecScatterEnd(osm->pctoouter, xin, osm->pcx, INSERT_VALUES, SCATTER_REVERSE));
747: x = osm->pcx;
748: y = osm->pcy;
749: } else {
750: x = xin;
751: y = yout;
752: }
753: /*
754: Support for limiting the restriction or interpolation to only local
755: subdomain values (leaving the other values 0).
757: Note: these are reversed from the PCApply_GASM() because we are applying the
758: transpose of the three terms
759: */
760: if (!(osm->type & PC_GASM_INTERPOLATE)) {
761: /* have to zero the work RHS since scatter may leave some slots empty */
762: PetscCall(VecZeroEntries(osm->gx));
763: PetscCall(VecScatterBegin(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
764: } else {
765: PetscCall(VecScatterBegin(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
766: }
767: PetscCall(VecZeroEntries(osm->gy));
768: if (!(osm->type & PC_GASM_INTERPOLATE)) {
769: PetscCall(VecScatterEnd(osm->girestriction, x, osm->gx, INSERT_VALUES, forward));
770: } else {
771: PetscCall(VecScatterEnd(osm->gorestriction, x, osm->gx, INSERT_VALUES, forward));
772: }
773: /* do the local solves */
774: for (i = 0; i < osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
775: PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
776: PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
777: }
778: PetscCall(VecZeroEntries(y));
779: if (!(osm->type & PC_GASM_RESTRICT)) {
780: PetscCall(VecScatterBegin(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
781: PetscCall(VecScatterEnd(osm->girestriction, osm->gy, y, ADD_VALUES, reverse));
782: } else {
783: PetscCall(VecScatterBegin(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
784: PetscCall(VecScatterEnd(osm->gorestriction, osm->gy, y, ADD_VALUES, reverse));
785: }
786: if (osm->pctoouter) {
787: PetscCall(VecScatterBegin(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
788: PetscCall(VecScatterEnd(osm->pctoouter, y, yout, INSERT_VALUES, SCATTER_FORWARD));
789: }
790: PetscFunctionReturn(PETSC_SUCCESS);
791: }
793: static PetscErrorCode PCReset_GASM(PC pc)
794: {
795: PC_GASM *osm = (PC_GASM *)pc->data;
796: PetscInt i;
798: PetscFunctionBegin;
799: if (osm->ksp) {
800: for (i = 0; i < osm->n; i++) PetscCall(KSPReset(osm->ksp[i]));
801: }
802: if (osm->pmat) {
803: if (osm->n > 0) {
804: PetscMPIInt size;
805: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
806: if (size > 1) {
807: /* osm->pmat is created by MatCreateSubMatricesMPI(), cannot use MatDestroySubMatrices() */
808: PetscCall(MatDestroyMatrices(osm->n, &osm->pmat));
809: } else {
810: PetscCall(MatDestroySubMatrices(osm->n, &osm->pmat));
811: }
812: }
813: }
814: if (osm->x) {
815: for (i = 0; i < osm->n; i++) {
816: PetscCall(VecDestroy(&osm->x[i]));
817: PetscCall(VecDestroy(&osm->y[i]));
818: }
819: }
820: PetscCall(VecDestroy(&osm->gx));
821: PetscCall(VecDestroy(&osm->gy));
823: PetscCall(VecScatterDestroy(&osm->gorestriction));
824: PetscCall(VecScatterDestroy(&osm->girestriction));
825: if (!osm->user_subdomains) {
826: PetscCall(PCGASMDestroySubdomains(osm->n, &osm->ois, &osm->iis));
827: osm->N = PETSC_DETERMINE;
828: osm->nmax = PETSC_DETERMINE;
829: }
830: if (osm->pctoouter) PetscCall(VecScatterDestroy(&(osm->pctoouter)));
831: if (osm->permutationIS) PetscCall(ISDestroy(&(osm->permutationIS)));
832: if (osm->pcx) PetscCall(VecDestroy(&(osm->pcx)));
833: if (osm->pcy) PetscCall(VecDestroy(&(osm->pcy)));
834: if (osm->permutationP) PetscCall(MatDestroy(&(osm->permutationP)));
835: if (osm->pcmat) PetscCall(MatDestroy(&osm->pcmat));
836: PetscFunctionReturn(PETSC_SUCCESS);
837: }
839: static PetscErrorCode PCDestroy_GASM(PC pc)
840: {
841: PC_GASM *osm = (PC_GASM *)pc->data;
842: PetscInt i;
844: PetscFunctionBegin;
845: PetscCall(PCReset_GASM(pc));
846: /* PCReset_GASM() does not destroy the subdomains if user_subdomains is true. */
847: PetscCall(PCGASMDestroySubdomains(osm->n, &osm->ois, &osm->iis));
848: if (osm->ksp) {
849: for (i = 0; i < osm->n; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
850: PetscCall(PetscFree(osm->ksp));
851: }
852: PetscCall(PetscFree(osm->x));
853: PetscCall(PetscFree(osm->y));
854: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSubdomains_C", NULL));
855: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetOverlap_C", NULL));
856: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetType_C", NULL));
857: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSortIndices_C", NULL));
858: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMGetSubKSP_C", NULL));
859: PetscCall(PetscFree(pc->data));
860: PetscFunctionReturn(PETSC_SUCCESS);
861: }
863: static PetscErrorCode PCSetFromOptions_GASM(PC pc, PetscOptionItems *PetscOptionsObject)
864: {
865: PC_GASM *osm = (PC_GASM *)pc->data;
866: PetscInt blocks, ovl;
867: PetscBool flg;
868: PCGASMType gasmtype;
870: PetscFunctionBegin;
871: PetscOptionsHeadBegin(PetscOptionsObject, "Generalized additive Schwarz options");
872: PetscCall(PetscOptionsBool("-pc_gasm_use_dm_subdomains", "If subdomains aren't set, use DMCreateDomainDecomposition() to define subdomains.", "PCGASMSetUseDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
873: PetscCall(PetscOptionsInt("-pc_gasm_total_subdomains", "Total number of subdomains across communicator", "PCGASMSetTotalSubdomains", osm->N, &blocks, &flg));
874: if (flg) PetscCall(PCGASMSetTotalSubdomains(pc, blocks));
875: PetscCall(PetscOptionsInt("-pc_gasm_overlap", "Number of overlapping degrees of freedom", "PCGASMSetOverlap", osm->overlap, &ovl, &flg));
876: if (flg) {
877: PetscCall(PCGASMSetOverlap(pc, ovl));
878: osm->dm_subdomains = PETSC_FALSE;
879: }
880: flg = PETSC_FALSE;
881: PetscCall(PetscOptionsEnum("-pc_gasm_type", "Type of restriction/extension", "PCGASMSetType", PCGASMTypes, (PetscEnum)osm->type, (PetscEnum *)&gasmtype, &flg));
882: if (flg) PetscCall(PCGASMSetType(pc, gasmtype));
883: PetscCall(PetscOptionsBool("-pc_gasm_use_hierachical_partitioning", "use hierarchical partitioning", NULL, osm->hierarchicalpartitioning, &osm->hierarchicalpartitioning, &flg));
884: PetscOptionsHeadEnd();
885: PetscFunctionReturn(PETSC_SUCCESS);
886: }
888: /*@
889: PCGASMSetTotalSubdomains - sets the total number of subdomains to use across the communicator for `PCGASM`
891: Logically Collective
893: Input Parameters:
894: + pc - the preconditioner
895: - N - total number of subdomains
897: Level: beginner
899: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMSetOverlap()`
900: `PCGASMCreateSubdomains2D()`
901: @*/
902: PetscErrorCode PCGASMSetTotalSubdomains(PC pc, PetscInt N)
903: {
904: PC_GASM *osm = (PC_GASM *)pc->data;
905: PetscMPIInt size, rank;
907: PetscFunctionBegin;
908: PetscCheck(N >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Total number of subdomains must be 1 or more, got N = %" PetscInt_FMT, N);
909: PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCGASMSetTotalSubdomains() should be called before calling PCSetUp().");
911: PetscCall(PCGASMDestroySubdomains(osm->n, &osm->iis, &osm->ois));
912: osm->ois = osm->iis = NULL;
914: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
915: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
916: osm->N = N;
917: osm->n = PETSC_DETERMINE;
918: osm->nmax = PETSC_DETERMINE;
919: osm->dm_subdomains = PETSC_FALSE;
920: PetscFunctionReturn(PETSC_SUCCESS);
921: }
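/*
   Minimal usage sketch (the PC object `pc` is assumed to exist and the count 16 is an example):

     PetscCall(PCSetType(pc, PCGASM));
     PetscCall(PCGASMSetTotalSubdomains(pc, 16));   the index sets themselves are built later, in PCSetUp_GASM()

   or, equivalently, from the options database: -pc_type gasm -pc_gasm_total_subdomains 16
*/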
923: static PetscErrorCode PCGASMSetSubdomains_GASM(PC pc, PetscInt n, IS iis[], IS ois[])
924: {
925: PC_GASM *osm = (PC_GASM *)pc->data;
926: PetscInt i;
928: PetscFunctionBegin;
929: PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each MPI rank must have 1 or more subdomains, got n = %" PetscInt_FMT, n);
930: PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCGASMSetSubdomains() should be called before calling PCSetUp().");
932: PetscCall(PCGASMDestroySubdomains(osm->n, &osm->iis, &osm->ois));
933: osm->iis = osm->ois = NULL;
934: osm->n = n;
935: osm->N = PETSC_DETERMINE;
936: osm->nmax = PETSC_DETERMINE;
937: if (ois) {
938: PetscCall(PetscMalloc1(n, &osm->ois));
939: for (i = 0; i < n; i++) {
940: PetscCall(PetscObjectReference((PetscObject)ois[i]));
941: osm->ois[i] = ois[i];
942: }
943: /*
944: Since the user set the outer subdomains, even if nontrivial overlap was requested via PCGASMSetOverlap(),
945: it will be ignored. To avoid confusion later on (e.g., when viewing the PC), the overlap size is set to -1.
946: */
947: osm->overlap = -1;
948: /* inner subdomains must be provided */
949: PetscCheck(iis, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Inner index sets must be provided when outer index sets are specified");
950: } /* end if */
951: if (iis) {
952: PetscCall(PetscMalloc1(n, &osm->iis));
953: for (i = 0; i < n; i++) {
954: PetscCall(PetscObjectReference((PetscObject)iis[i]));
955: osm->iis[i] = iis[i];
956: }
957: if (!ois) {
958: osm->ois = NULL;
959: /* if the user does not provide outer indices, the corresponding outer subdomains are created in PCSetUp_GASM() by expanding the inner ones by osm->overlap */
960: }
961: }
962: if (PetscDefined(USE_DEBUG)) {
963: PetscInt j, rstart, rend, *covered, lsize;
964: const PetscInt *indices;
965: /* check if the inner indices cover and only cover the local portion of the preconditioning matrix */
966: PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
967: PetscCall(PetscCalloc1(rend - rstart, &covered));
968: /* check that the inner subdomains do not reference indices owned by other ranks and do not overlap each other */
969: for (i = 0; i < n; i++) {
970: PetscCall(ISGetIndices(osm->iis[i], &indices));
971: PetscCall(ISGetLocalSize(osm->iis[i], &lsize));
972: for (j = 0; j < lsize; j++) {
973: PetscCheck(indices[j] >= rstart && indices[j] < rend, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "inner subdomains cannot contain index %" PetscInt_FMT ", which is owned by another rank", indices[j]);
974: PetscCheck(covered[indices[j] - rstart] != 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "inner subdomains cannot overlap: index %" PetscInt_FMT " appears more than once", indices[j]);
975: covered[indices[j] - rstart] = 1;
976: }
977: PetscCall(ISRestoreIndices(osm->iis[i], &indices));
978: }
979: /* check if we miss any indices */
980: for (i = rstart; i < rend; i++) PetscCheck(covered[i - rstart], PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "local entity %" PetscInt_FMT " was not covered by inner subdomains", i);
981: PetscCall(PetscFree(covered));
982: }
983: if (iis) osm->user_subdomains = PETSC_TRUE;
984: PetscFunctionReturn(PETSC_SUCCESS);
985: }
987: static PetscErrorCode PCGASMSetOverlap_GASM(PC pc, PetscInt ovl)
988: {
989: PC_GASM *osm = (PC_GASM *)pc->data;
991: PetscFunctionBegin;
992: PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
993: PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCGASMSetOverlap() should be called before PCSetUp().");
994: if (!pc->setupcalled) osm->overlap = ovl;
995: PetscFunctionReturn(PETSC_SUCCESS);
996: }
998: static PetscErrorCode PCGASMSetType_GASM(PC pc, PCGASMType type)
999: {
1000: PC_GASM *osm = (PC_GASM *)pc->data;
1002: PetscFunctionBegin;
1003: osm->type = type;
1004: osm->type_set = PETSC_TRUE;
1005: PetscFunctionReturn(PETSC_SUCCESS);
1006: }
1008: static PetscErrorCode PCGASMSetSortIndices_GASM(PC pc, PetscBool doSort)
1009: {
1010: PC_GASM *osm = (PC_GASM *)pc->data;
1012: PetscFunctionBegin;
1013: osm->sort_indices = doSort;
1014: PetscFunctionReturn(PETSC_SUCCESS);
1015: }
1017: /*
1018: FIXME: This routine might need to be modified now that multiple ranks per subdomain are allowed.
1019: In particular, it would upset the global subdomain number calculation.
1020: */
1021: static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc, PetscInt *n, PetscInt *first, KSP **ksp)
1022: {
1023: PC_GASM *osm = (PC_GASM *)pc->data;
1025: PetscFunctionBegin;
1026: PetscCheck(osm->n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
1028: if (n) *n = osm->n;
1029: if (first) {
1030: PetscCallMPI(MPI_Scan(&osm->n, first, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
1031: *first -= osm->n;
1032: }
1033: if (ksp) {
1034: /* Assume that local solves are now different; not necessarily
1035: true, though! This flag is used only for PCView_GASM() */
1036: *ksp = osm->ksp;
1037: osm->same_subdomain_solvers = PETSC_FALSE;
1038: }
1039: PetscFunctionReturn(PETSC_SUCCESS);
1040: } /* PCGASMGetSubKSP_GASM() */
1042: /*@C
1043: PCGASMSetSubdomains - Sets the subdomains used on this MPI rank by the additive Schwarz
1044: preconditioner `PCGASM`, which allows multiple MPI ranks per subdomain
1046: Collective
1048: Input Parameters:
1049: + pc - the preconditioner object
1050: . n - the number of subdomains for this MPI rank
1051: . iis - the index sets that define the inner subdomains (or `NULL` for PETSc to determine subdomains)
1052: - ois - the index sets that define the outer subdomains (or `NULL` to use the same as `iis`, or to construct by expanding `iis` by
1053: the requested overlap)
1055: Level: advanced
1057: Notes:
1058: The `IS` indices use the parallel, global numbering of the vector entries.
1060: Inner subdomains are those where the correction is applied.
1062: Outer subdomains are those where the residual necessary to obtain the
1063: corrections is obtained (see `PCGASMType` for the use of inner/outer subdomains).
1065: Both inner and outer subdomains can extend over several MPI ranks.
1066: This rank's portion of a subdomain is known as a local subdomain.
1068: Inner subdomains may not overlap each other, may not contain indices owned by other ranks,
1069: and together must cover the entire local range owned by the current rank. The index sets on each
1070: rank should be ordered such that the ith local subdomain is connected to the ith remote subdomain
1071: on another MPI rank.
1073: By default the `PCGASM` preconditioner uses 1 (local) subdomain per MPI rank.
1075: The `iis` and `ois` arrays may be freed after this call using `PCGASMDestroySubdomains()`
1077: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetOverlap()`, `PCGASMGetSubKSP()`, `PCGASMDestroySubdomains()`,
1078: `PCGASMCreateSubdomains2D()`, `PCGASMGetSubdomains()`
1079: @*/
1080: PetscErrorCode PCGASMSetSubdomains(PC pc, PetscInt n, IS iis[], IS ois[])
1081: {
1082: PC_GASM *osm = (PC_GASM *)pc->data;
1084: PetscFunctionBegin;
1086: PetscTryMethod(pc, "PCGASMSetSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, iis, ois));
1087: osm->dm_subdomains = PETSC_FALSE;
1088: PetscFunctionReturn(PETSC_SUCCESS);
1089: }
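/*
   A sketch of supplying one inner subdomain per rank that covers exactly the locally owned
   rows of the matrix (here called A, a placeholder), while letting PCSetUp_GASM() build the
   outer subdomains by extending the inner ones with the requested overlap:

     IS       iis;
     PetscInt rstart, rend;
     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &iis));
     PetscCall(PCGASMSetSubdomains(pc, 1, &iis, NULL));
     PetscCall(PCGASMSetOverlap(pc, 1));
     PetscCall(ISDestroy(&iis));    the PC keeps its own reference
*/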
1091: /*@
1092: PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
1093: additive Schwarz preconditioner `PCGASM`. Either all or no MPI ranks in the
1094: pc communicator must call this routine.
1096: Logically Collective
1098: Input Parameters:
1099: + pc - the preconditioner context
1100: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 0)
1102: Options Database Key:
1103: . -pc_gasm_overlap <overlap> - Sets overlap
1105: Level: intermediate
1107: Notes:
1108: By default the `PCGASM` preconditioner uses 1 subdomain per rank. To use
1109: multiple subdomains per MPI process or "straddling" subdomains that intersect
1110: multiple ranks use `PCGASMSetSubdomains()` (or option `-pc_gasm_total_subdomains` <n>).
1112: The overlap defaults to 0, so if one desires that no additional
1113: overlap be computed beyond what may have been set with a call to
1114: `PCGASMSetSubdomains()`, then `ovl` must be set to be 0. In particular, if one does
1115: not explicitly set the subdomains in application code, then all overlap would be computed
1116: internally by PETSc, and using an overlap of 0 would result in a `PCGASM`
1117: variant that is equivalent to the block Jacobi preconditioner.
1119: One can define initial index sets with any overlap via
1120: `PCGASMSetSubdomains()`; the routine `PCGASMSetOverlap()` merely allows
1121: PETSc to extend that overlap further, if desired.
1123: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`,
1124: `PCGASMCreateSubdomains2D()`, `PCGASMGetSubdomains()`
1125: @*/
1126: PetscErrorCode PCGASMSetOverlap(PC pc, PetscInt ovl)
1127: {
1128: PC_GASM *osm = (PC_GASM *)pc->data;
1130: PetscFunctionBegin;
1133: PetscTryMethod(pc, "PCGASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1134: osm->dm_subdomains = PETSC_FALSE;
1135: PetscFunctionReturn(PETSC_SUCCESS);
1136: }
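/*
   Sketch (the value is an example): request two layers of additional overlap around the
   PETSc-generated inner subdomains,

     PetscCall(PCSetType(pc, PCGASM));
     PetscCall(PCGASMSetOverlap(pc, 2));       or, equivalently, -pc_gasm_overlap 2
*/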
1138: /*@
1139: PCGASMSetType - Sets the type of restriction and interpolation used
1140: for local problems in the `PCGASM` additive Schwarz method.
1142: Logically Collective
1144: Input Parameters:
1145: + pc - the preconditioner context
1146: - type - variant of `PCGASM`, one of
1147: .vb
1148: `PC_GASM_BASIC` - full interpolation and restriction
1149: `PC_GASM_RESTRICT` - full restriction, local MPI rank interpolation
1150: `PC_GASM_INTERPOLATE` - full interpolation, local MPI rank restriction
1151: `PC_GASM_NONE` - local MPI rank restriction and interpolation
1152: .ve
1154: Options Database Key:
1155: . -pc_gasm_type [basic,restrict,interpolate,none] - Sets `PCGASM` type
1157: Level: intermediate
1159: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`,
1160: `PCGASMCreateSubdomains2D()`, `PCASM`, `PCASMSetType()`
1161: @*/
1162: PetscErrorCode PCGASMSetType(PC pc, PCGASMType type)
1163: {
1164: PetscFunctionBegin;
1167: PetscTryMethod(pc, "PCGASMSetType_C", (PC, PCGASMType), (pc, type));
1168: PetscFunctionReturn(PETSC_SUCCESS);
1169: }
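/*
   Sketch: select the classical (non-restricted) additive Schwarz variant in code,

     PetscCall(PCGASMSetType(pc, PC_GASM_BASIC));   or, equivalently, -pc_gasm_type basic
*/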
1171: /*@
1172: PCGASMSetSortIndices - Determines whether subdomain indices are sorted.
1174: Logically Collective
1176: Input Parameters:
1177: + pc - the preconditioner context
1178: - doSort - sort the subdomain indices
1180: Level: intermediate
1182: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`,
1183: `PCGASMCreateSubdomains2D()`
1184: @*/
1185: PetscErrorCode PCGASMSetSortIndices(PC pc, PetscBool doSort)
1186: {
1187: PetscFunctionBegin;
1190: PetscTryMethod(pc, "PCGASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1191: PetscFunctionReturn(PETSC_SUCCESS);
1192: }
1194: /*@C
1195: PCGASMGetSubKSP - Gets the local `KSP` contexts for all subdomains on
1196: this MPI rank.
1198: Collective iff first_local is requested
1200: Input Parameter:
1201: . pc - the preconditioner context
1203: Output Parameters:
1204: + n_local - the number of blocks on this MPI rank or `NULL`
1205: . first_local - the global number of the first block on this rank or `NULL`,
1206: all ranks must request or all must pass `NULL`
1207: - ksp - the array of `KSP` contexts
1209: Level: advanced
1211: Note:
1212: After `PCGASMGetSubKSP()` the returned array of `KSP`s must not be freed by the caller
1214: Currently for some matrix implementations only 1 block per MPI process
1215: is supported.
1217: You must call `KSPSetUp()` before calling `PCGASMGetSubKSP()`.
1219: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMSetOverlap()`,
1220: `PCGASMCreateSubdomains2D()`,
1221: @*/
1222: PetscErrorCode PCGASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1223: {
1224: PetscFunctionBegin;
1226: PetscUseMethod(pc, "PCGASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1227: PetscFunctionReturn(PETSC_SUCCESS);
1228: }
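/*
   Sketch of customizing the subdomain solvers after setup (the outer objects `ksp` and `pc`
   are placeholders, and PCJACOBI is just an example choice for the subdomain PC):

     KSP     *subksp;
     PC       subpc;
     PetscInt nlocal, first, i;
     PetscCall(KSPSetUp(ksp));                                  required before PCGASMGetSubKSP()
     PetscCall(PCGASMGetSubKSP(pc, &nlocal, &first, &subksp));  do not free subksp
     for (i = 0; i < nlocal; i++) {
       PetscCall(KSPGetPC(subksp[i], &subpc));
       PetscCall(PCSetType(subpc, PCJACOBI));
     }
*/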
1230: /*MC
1231: PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1232: its own `KSP` object on a subset of MPI processes
1234: Options Database Keys:
1235: + -pc_gasm_total_subdomains <n> - Sets the total number of subdomains to be distributed among the MPI processes
1236: . -pc_gasm_view_subdomains - activates the printing of subdomain indices in `PCView()`, -ksp_view or -snes_view
1237: . -pc_gasm_print_subdomains - activates the printing of subdomain indices in `PCSetUp()`
1238: . -pc_gasm_overlap <ovl> - Sets overlap by which to (automatically) extend local subdomains
1239: - -pc_gasm_type [basic,restrict,interpolate,none] - Sets `PCGASMType`
1241: Level: beginner
1243: Notes:
1244: To set options on the solvers for each block append `-sub_` to all the `KSP`, and `PC`
1245: options database keys. For example, `-sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly`
1247: To set the options on the solvers separate for each block call `PCGASMGetSubKSP()`
1248: and set the options directly on the resulting `KSP` object (you can access its `PC`
1249: with `KSPGetPC()`)
1251: See {cite}`dryja1987additive` and {cite}`1sbg` for details on additive Schwarz algorithms
1253: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASM`, `PCGASMType`, `PCGASMSetType()`,
1254: `PCBJACOBI`, `PCGASMGetSubKSP()`, `PCGASMSetSubdomains()`,
1255: `PCSetModifySubMatrices()`, `PCGASMSetOverlap()`, `PCGASMSetType()`
1256: M*/
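/*
   Typical command-line usage sketch (executable name and values are examples; with the
   default of one subdomain per rank the subdomain solves below are sequential):

     mpiexec -n 8 ./app -ksp_type gmres -pc_type gasm -pc_gasm_overlap 2 \
        -sub_ksp_type preonly -sub_pc_type ilu -pc_gasm_view_subdomains
*/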
1258: PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc)
1259: {
1260: PC_GASM *osm;
1262: PetscFunctionBegin;
1263: PetscCall(PetscNew(&osm));
1265: osm->N = PETSC_DETERMINE;
1266: osm->n = PETSC_DECIDE;
1267: osm->nmax = PETSC_DETERMINE;
1268: osm->overlap = 0;
1269: osm->ksp = NULL;
1270: osm->gorestriction = NULL;
1271: osm->girestriction = NULL;
1272: osm->pctoouter = NULL;
1273: osm->gx = NULL;
1274: osm->gy = NULL;
1275: osm->x = NULL;
1276: osm->y = NULL;
1277: osm->pcx = NULL;
1278: osm->pcy = NULL;
1279: osm->permutationIS = NULL;
1280: osm->permutationP = NULL;
1281: osm->pcmat = NULL;
1282: osm->ois = NULL;
1283: osm->iis = NULL;
1284: osm->pmat = NULL;
1285: osm->type = PC_GASM_RESTRICT;
1286: osm->same_subdomain_solvers = PETSC_TRUE;
1287: osm->sort_indices = PETSC_TRUE;
1288: osm->dm_subdomains = PETSC_FALSE;
1289: osm->hierarchicalpartitioning = PETSC_FALSE;
1291: pc->data = (void *)osm;
1292: pc->ops->apply = PCApply_GASM;
1293: pc->ops->matapply = PCMatApply_GASM;
1294: pc->ops->applytranspose = PCApplyTranspose_GASM;
1295: pc->ops->setup = PCSetUp_GASM;
1296: pc->ops->reset = PCReset_GASM;
1297: pc->ops->destroy = PCDestroy_GASM;
1298: pc->ops->setfromoptions = PCSetFromOptions_GASM;
1299: pc->ops->setuponblocks = PCSetUpOnBlocks_GASM;
1300: pc->ops->view = PCView_GASM;
1301: pc->ops->applyrichardson = NULL;
1303: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSubdomains_C", PCGASMSetSubdomains_GASM));
1304: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetOverlap_C", PCGASMSetOverlap_GASM));
1305: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetType_C", PCGASMSetType_GASM));
1306: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMSetSortIndices_C", PCGASMSetSortIndices_GASM));
1307: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGASMGetSubKSP_C", PCGASMGetSubKSP_GASM));
1308: PetscFunctionReturn(PETSC_SUCCESS);
1309: }
1311: PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[])
1312: {
1313: MatPartitioning mpart;
1314: const char *prefix;
1315: PetscInt i, j, rstart, rend, bs;
1316: PetscBool hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1317: Mat Ad = NULL, adj;
1318: IS ispart, isnumb, *is;
1320: PetscFunctionBegin;
1321: PetscCheck(nloc >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local subdomains must be > 0, got nloc = %" PetscInt_FMT, nloc);
1323: /* Get prefix, row distribution, and block size */
1324: PetscCall(MatGetOptionsPrefix(A, &prefix));
1325: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1326: PetscCall(MatGetBlockSize(A, &bs));
1327: PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);
1329: /* Get diagonal block from matrix if possible */
1330: PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1331: if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1332: if (Ad) {
1333: PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1334: if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1335: }
1336: if (Ad && nloc > 1) {
1337: PetscBool match, done;
1338: /* Try to setup a good matrix partitioning if available */
1339: PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1340: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1341: PetscCall(MatPartitioningSetFromOptions(mpart));
1342: PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1343: if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1344: if (!match) { /* assume a "good" partitioner is available */
1345: PetscInt na;
1346: const PetscInt *ia, *ja;
1347: PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1348: if (done) {
1349: /* Build adjacency matrix by hand. Unfortunately a call to
1350: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1351: remove the block-aij structure and we cannot expect
1352: MatPartitioning to split vertices as we need */
1353: PetscInt i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1354: const PetscInt *row;
1355: nnz = 0;
1356: for (i = 0; i < na; i++) { /* count number of nonzeros */
1357: len = ia[i + 1] - ia[i];
1358: row = ja + ia[i];
1359: for (j = 0; j < len; j++) {
1360: if (row[j] == i) { /* don't count diagonal */
1361: len--;
1362: break;
1363: }
1364: }
1365: nnz += len;
1366: }
1367: PetscCall(PetscMalloc1(na + 1, &iia));
1368: PetscCall(PetscMalloc1(nnz, &jja));
1369: nnz = 0;
1370: iia[0] = 0;
1371: for (i = 0; i < na; i++) { /* fill adjacency */
1372: cnt = 0;
1373: len = ia[i + 1] - ia[i];
1374: row = ja + ia[i];
1375: for (j = 0; j < len; j++) {
1376: if (row[j] != i) jja[nnz + cnt++] = row[j]; /* if not diagonal */
1377: }
1378: nnz += cnt;
1379: iia[i + 1] = nnz;
1380: }
1381: /* Partitioning of the adjacency matrix */
1382: PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1383: PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1384: PetscCall(MatPartitioningSetNParts(mpart, nloc));
1385: PetscCall(MatPartitioningApply(mpart, &ispart));
1386: PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1387: PetscCall(MatDestroy(&adj));
1388: foundpart = PETSC_TRUE;
1389: }
1390: PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1391: }
1392: PetscCall(MatPartitioningDestroy(&mpart));
1393: }
1394: PetscCall(PetscMalloc1(nloc, &is));
1395: if (!foundpart) {
1396: /* Partitioning by contiguous chunks of rows */
1398: PetscInt mbs = (rend - rstart) / bs;
1399: PetscInt start = rstart;
1400: for (i = 0; i < nloc; i++) {
1401: PetscInt count = (mbs / nloc + ((mbs % nloc) > i)) * bs;
1402: PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1403: start += count;
1404: }
1406: } else {
1407: /* Partitioning by adjacency of diagonal block */
1409: const PetscInt *numbering;
1410: PetscInt *count, nidx, *indices, *newidx, start = 0;
1411: /* Get node count in each partition */
1412: PetscCall(PetscMalloc1(nloc, &count));
1413: PetscCall(ISPartitioningCount(ispart, nloc, count));
1414: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1415: for (i = 0; i < nloc; i++) count[i] *= bs;
1416: }
1417: /* Build indices from node numbering */
1418: PetscCall(ISGetLocalSize(isnumb, &nidx));
1419: PetscCall(PetscMalloc1(nidx, &indices));
1420: for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1421: PetscCall(ISGetIndices(isnumb, &numbering));
1422: PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1423: PetscCall(ISRestoreIndices(isnumb, &numbering));
1424: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1425: PetscCall(PetscMalloc1(nidx * bs, &newidx));
1426: for (i = 0; i < nidx; i++) {
1427: for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1428: }
1429: PetscCall(PetscFree(indices));
1430: nidx *= bs;
1431: indices = newidx;
1432: }
1433: /* Shift to get global indices */
1434: for (i = 0; i < nidx; i++) indices[i] += rstart;
1436: /* Build the index sets for each block */
1437: for (i = 0; i < nloc; i++) {
1438: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1439: PetscCall(ISSort(is[i]));
1440: start += count[i];
1441: }
1443: PetscCall(PetscFree(count));
1444: PetscCall(PetscFree(indices));
1445: PetscCall(ISDestroy(&isnumb));
1446: PetscCall(ISDestroy(&ispart));
1447: }
1448: *iis = is;
1449: PetscFunctionReturn(PETSC_SUCCESS);
1450: }
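/*
  Worked illustration of the contiguous-chunk fallback above (numbers are hypothetical,
  not produced by this file): with rend - rstart = 10 local rows, bs = 1, and nloc = 3,
  mbs = 10 and the per-subdomain counts mbs/nloc + ((mbs % nloc) > i) are 4, 3, 3, so the
  strided index sets cover [rstart, rstart+4), [rstart+4, rstart+7), and [rstart+7, rstart+10).
*/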
1452: PETSC_INTERN PetscErrorCode PCGASMCreateStraddlingSubdomains(Mat A, PetscInt N, PetscInt *n, IS *iis[])
1453: {
1454: PetscFunctionBegin;
1455: PetscCall(MatSubdomainsCreateCoalesce(A, N, n, iis));
1456: PetscFunctionReturn(PETSC_SUCCESS);
1457: }
1459: /*@C
1460: PCGASMCreateSubdomains - Creates `n` index sets defining `n` nonoverlapping subdomains on this MPI process for the `PCGASM` additive
1461: Schwarz preconditioner for any problem based on its matrix.
1463: Collective
1465: Input Parameters:
1466: + A - The global matrix operator
1467: - N - the number of global subdomains requested
1469: Output Parameters:
1470: + n - the number of subdomains created on this MPI rank
1471: - iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1473: Level: advanced
1475: Notes:
1476: When `N` >= A's communicator size, each subdomain is local -- contained within a single MPI process.
1477: When `N` < size, the subdomains 'straddle' MPI rank boundaries and are no longer local.
1478: The resulting subdomains can be used in `PCGASMSetSubdomains`(pc,n,iis,`NULL`). The overlapping
1479: outer subdomains will be automatically generated from these according to the requested amount of
1480: overlap; this is currently supported only with local subdomains.
1482: Use `PCGASMDestroySubdomains()` to free the array and the list of index sets.
1484: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMDestroySubdomains()`
1485: @*/
1486: PetscErrorCode PCGASMCreateSubdomains(Mat A, PetscInt N, PetscInt *n, IS *iis[])
1487: {
1488: PetscMPIInt size;
1490: PetscFunctionBegin;
1492: PetscAssertPointer(iis, 4);
1494: PetscCheck(N >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Number of subdomains must be > 0, N = %" PetscInt_FMT, N);
1495: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1496: if (N >= size) {
1497: *n = N / size + (N % size);
1498: PetscCall(PCGASMCreateLocalSubdomains(A, *n, iis));
1499: } else {
1500: PetscCall(PCGASMCreateStraddlingSubdomains(A, N, n, iis));
1501: }
1502: PetscFunctionReturn(PETSC_SUCCESS);
1503: }
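/*
  A minimal sketch of the intended calling sequence (assumes an existing Mat A and a PC pc
  of type PCGASM; the subdomain count 4 is illustrative). Per the notes above, the outer
  (overlapping) subdomains are generated automatically, and the index sets may be released
  once they have been handed to the PC:

    PetscInt n;
    IS      *iis;

    PetscCall(PCGASMCreateSubdomains(A, 4, &n, &iis));
    PetscCall(PCGASMSetSubdomains(pc, n, iis, NULL));
    PetscCall(PCGASMDestroySubdomains(n, &iis, NULL));
*/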
1505: /*@C
1506: PCGASMDestroySubdomains - Destroys the index sets created with
1507: `PCGASMCreateSubdomains()` or `PCGASMCreateSubdomains2D()`. Should be
1508: called after setting subdomains with `PCGASMSetSubdomains()`.
1510: Collective
1512: Input Parameters:
1513: + n - the number of index sets
1514: . iis - the array of inner subdomains
1515: - ois - the array of outer subdomains, can be `NULL`
1517: Level: intermediate
1519: Note:
1520: This is a convenience subroutine that walks each list,
1521: destroys each `IS` on the list, and then frees the list. At the end the
1522: list pointers are set to `NULL`.
1524: .seealso: [](ch_ksp), `PCGASM`, `PCGASMCreateSubdomains()`, `PCGASMSetSubdomains()`
1525: @*/
1526: PetscErrorCode PCGASMDestroySubdomains(PetscInt n, IS **iis, IS **ois)
1527: {
1528: PetscInt i;
1530: PetscFunctionBegin;
1531: if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1532: if (ois) {
1533: PetscAssertPointer(ois, 3);
1534: if (*ois) {
1535: PetscAssertPointer(*ois, 3);
1536: for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*ois)[i]));
1537: PetscCall(PetscFree((*ois)));
1538: }
1539: }
1540: if (iis) {
1541: PetscAssertPointer(iis, 2);
1542: if (*iis) {
1543: PetscAssertPointer(*iis, 2);
1544: for (i = 0; i < n; i++) PetscCall(ISDestroy(&(*iis)[i]));
1545: PetscCall(PetscFree((*iis)));
1546: }
1547: }
1548: PetscFunctionReturn(PETSC_SUCCESS);
1549: }
1551: #define PCGASMLocalSubdomainBounds2D(M, N, xleft, ylow, xright, yhigh, first, last, xleft_loc, ylow_loc, xright_loc, yhigh_loc, n) \
1552: do { \
1553: PetscInt first_row = first / M, last_row = last / M + 1; \
1554: /* \
1555: Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners \
1556: of the bounding box of the intersection of the subdomain with the local ownership range (local \
1557: subdomain). \
1558: Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows \
1559: of the intersection. \
1560: */ \
1561: /* ylow_loc is the grid row containing the first element of the local subdomain */ \
1562: *ylow_loc = PetscMax(first_row, ylow); \
1563: /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1564: *xleft_loc = *ylow_loc == first_row ? PetscMax(first % M, xleft) : xleft; \
1565: /* yhigh_loc is the grid row above the last local subdomain element */ \
1566: *yhigh_loc = PetscMin(last_row, yhigh); \
1567: /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1568: *xright_loc = *yhigh_loc == last_row ? PetscMin(xright, last % M) : xright; \
1569: /* Now compute the size of the local subdomain n. */ \
1570: *n = 0; \
1571: if (*ylow_loc < *yhigh_loc) { \
1572: PetscInt width = xright - xleft; \
1573: *n += width * (*yhigh_loc - *ylow_loc - 1); \
1574: *n += PetscMin(PetscMax(*xright_loc - xleft, 0), width); \
1575: *n -= PetscMin(PetscMax(*xleft_loc - xleft, 0), width); \
1576: } \
1577: } while (0)
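/*
  Worked illustration of the macro above with hypothetical numbers: take a 10 x 10 grid
  (M = 10), ownership range [first, last) = [35, 65), and a subdomain box with
  xleft = 2, xright = 8, ylow = 1, yhigh = 9. Then first_row = 3 and last_row = 7, so
  ylow_loc = 3, yhigh_loc = 7, xleft_loc = max(35 % 10, 2) = 5, xright_loc = min(8, 65 % 10) = 5,
  and n = 6*(7 - 3 - 1) + min(max(5 - 2, 0), 6) - min(max(5 - 2, 0), 6) = 18, which matches a
  direct count of the box entries that fall inside the ownership range (3 + 6 + 6 + 3).
*/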
1579: /*@C
1580: PCGASMCreateSubdomains2D - Creates the index sets for the `PCGASM` overlapping Schwarz
1581: preconditioner for a two-dimensional problem on a regular grid.
1583: Collective
1585: Input Parameters:
1586: + pc - the preconditioner context
1587: . M - the global number of grid points in the x direction
1588: . N - the global number of grid points in the y direction
1589: . Mdomains - the global number of subdomains in the x direction
1590: . Ndomains - the global number of subdomains in the y direction
1591: . dof - degrees of freedom per node
1592: - overlap - overlap in mesh lines
1594: Output Parameters:
1595: + nsub - the number of local subdomains created
1596: . iis - array of index sets defining inner (nonoverlapping) subdomains
1597: - ois - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1599: Level: advanced
1601: Note:
1602: Use `PCGASMDestroySubdomains()` to free the index sets and the arrays
1604: Fortran Notes:
1605: The `iis` and `ois` arguments must be declared as arrays long enough to hold `nsub` entries
1607: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetSubdomains()`, `PCGASMGetSubKSP()`, `PCGASMSetOverlap()`, `PCASMCreateSubdomains2D()`,
1608: `PCGASMDestroySubdomains()`
1609: @*/
1610: PetscErrorCode PCGASMCreateSubdomains2D(PC pc, PetscInt M, PetscInt N, PetscInt Mdomains, PetscInt Ndomains, PetscInt dof, PetscInt overlap, PetscInt *nsub, IS **iis, IS **ois)
1611: {
1612: PetscMPIInt size, rank;
1613: PetscInt i, j;
1614: PetscInt maxheight, maxwidth;
1615: PetscInt xstart, xleft, xright, xleft_loc, xright_loc;
1616: PetscInt ystart, ylow, yhigh, ylow_loc, yhigh_loc;
1617: PetscInt x[2][2], y[2][2], n[2];
1618: PetscInt first, last;
1619: PetscInt nidx, *idx;
1620: PetscInt ii, jj, s, q, d;
1621: PetscInt k, kk;
1622: PetscMPIInt color;
1623: MPI_Comm comm, subcomm;
1624: IS **xis = NULL, **is = ois, **is_local = iis;
1626: PetscFunctionBegin;
1627: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
1628: PetscCallMPI(MPI_Comm_size(comm, &size));
1629: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1630: PetscCall(MatGetOwnershipRange(pc->pmat, &first, &last));
1631: PetscCheck((first % dof) == 0 && (last % dof) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE,
1632: "Matrix row partitioning unsuitable for domain decomposition: local row range (%" PetscInt_FMT ",%" PetscInt_FMT ") "
1633: "does not respect the number of degrees of freedom per grid point %" PetscInt_FMT,
1634: first, last, dof);
1636: /* Determine the number of domains with nonzero intersections with the local ownership range. */
1637: s = 0;
1638: ystart = 0;
1639: for (j = 0; j < Ndomains; ++j) {
1640: maxheight = N / Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1641: PetscCheck(maxheight >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the vertical direction for mesh height %" PetscInt_FMT, Ndomains, N);
1642: /* Vertical domain limits with an overlap. */
1643: ylow = PetscMax(ystart - overlap, 0);
1644: yhigh = PetscMin(ystart + maxheight + overlap, N);
1645: xstart = 0;
1646: for (i = 0; i < Mdomains; ++i) {
1647: maxwidth = M / Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1648: PetscCheck(maxwidth >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the horizontal direction for mesh width %" PetscInt_FMT, Mdomains, M);
1649: /* Horizontal domain limits with an overlap. */
1650: xleft = PetscMax(xstart - overlap, 0);
1651: xright = PetscMin(xstart + maxwidth + overlap, M);
1652: /*
1653: Determine whether this subdomain intersects this rank's ownership range of pc->pmat.
1654: */
1655: PCGASMLocalSubdomainBounds2D(M, N, xleft, ylow, xright, yhigh, first, last, (&xleft_loc), (&ylow_loc), (&xright_loc), (&yhigh_loc), (&nidx));
1656: if (nidx) ++s;
1657: xstart += maxwidth;
1658: } /* for (i = 0; i < Mdomains; ++i) */
1659: ystart += maxheight;
1660: } /* for (j = 0; j < Ndomains; ++j) */
1662: /* Now we can allocate the necessary number of ISs. */
1663: *nsub = s;
1664: PetscCall(PetscMalloc1(*nsub, is));
1665: PetscCall(PetscMalloc1(*nsub, is_local));
1666: s = 0;
1667: ystart = 0;
1668: for (j = 0; j < Ndomains; ++j) {
1669: maxheight = N / Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1670: PetscCheck(maxheight >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the vertical direction for mesh height %" PetscInt_FMT, Ndomains, N);
1671: /* Vertical domain limits with an overlap. */
1672: y[0][0] = PetscMax(ystart - overlap, 0);
1673: y[0][1] = PetscMin(ystart + maxheight + overlap, N);
1674: /* Vertical domain limits without an overlap. */
1675: y[1][0] = ystart;
1676: y[1][1] = PetscMin(ystart + maxheight, N);
1677: xstart = 0;
1678: for (i = 0; i < Mdomains; ++i) {
1679: maxwidth = M / Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1680: PetscCheck(maxwidth >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many %" PetscInt_FMT " subdomains in the horizontal direction for mesh width %" PetscInt_FMT, Mdomains, M);
1681: /* Horizontal domain limits with an overlap. */
1682: x[0][0] = PetscMax(xstart - overlap, 0);
1683: x[0][1] = PetscMin(xstart + maxwidth + overlap, M);
1684: /* Horizontal domain limits without an overlap. */
1685: x[1][0] = xstart;
1686: x[1][1] = PetscMin(xstart + maxwidth, M);
1687: /*
1688: Determine whether this domain intersects this rank's ownership range of pc->pmat.
1689: Do this twice: first for the domains with overlaps, and once without.
1690: During the first pass create the subcommunicators, and use them on the second pass as well.
1691: */
1692: for (q = 0; q < 2; ++q) {
1693: PetscBool split = PETSC_FALSE;
1694: /*
1695: Domain limits (xleft, xright) and (ylow, yhigh) are adjusted
1696: according to whether the domain with an overlap or without is considered.
1697: */
1698: xleft = x[q][0];
1699: xright = x[q][1];
1700: ylow = y[q][0];
1701: yhigh = y[q][1];
1702: PCGASMLocalSubdomainBounds2D(M, N, xleft, ylow, xright, yhigh, first, last, (&xleft_loc), (&ylow_loc), (&xright_loc), (&yhigh_loc), (&nidx));
1703: nidx *= dof;
1704: n[q] = nidx;
1705: /*
1706: Based on the counted number of indices in the local domain *with an overlap*,
1707: construct a subcommunicator of all the MPI ranks supporting this domain.
1708: Observe that a domain with an overlap might have nontrivial local support,
1709: while the domain without an overlap might not. Hence, the decision to participate
1710: in the subcommunicator must be based on the domain with an overlap.
1711: */
1712: if (q == 0) {
1713: if (nidx) color = 1;
1714: else color = MPI_UNDEFINED;
1715: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
1716: split = PETSC_TRUE;
1717: }
1718: /*
1719: Proceed only if the number of local indices *with an overlap* is nonzero.
1720: */
1721: if (n[0]) {
1722: if (q == 0) xis = is;
1723: if (q == 1) {
1724: /*
1725: The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1726: Moreover, if the overlap is zero, the two ISs are identical.
1727: */
1728: if (overlap == 0) {
1729: (*is_local)[s] = (*is)[s];
1730: PetscCall(PetscObjectReference((PetscObject)(*is)[s]));
1731: continue;
1732: } else {
1733: xis = is_local;
1734: subcomm = ((PetscObject)(*is)[s])->comm;
1735: }
1736: } /* if (q == 1) */
1737: idx = NULL;
1738: PetscCall(PetscMalloc1(nidx, &idx));
1739: if (nidx) {
1740: k = 0;
1741: for (jj = ylow_loc; jj < yhigh_loc; ++jj) {
1742: PetscInt x0 = (jj == ylow_loc) ? xleft_loc : xleft;
1743: PetscInt x1 = (jj == yhigh_loc - 1) ? xright_loc : xright;
1744: kk = dof * (M * jj + x0);
1745: for (ii = x0; ii < x1; ++ii) {
1746: for (d = 0; d < dof; ++d) idx[k++] = kk++;
1747: }
1748: }
1749: }
1750: PetscCall(ISCreateGeneral(subcomm, nidx, idx, PETSC_OWN_POINTER, (*xis) + s));
1751: if (split) PetscCallMPI(MPI_Comm_free(&subcomm));
1752: } /* if (n[0]) */
1753: } /* for (q = 0; q < 2; ++q) */
1754: if (n[0]) ++s;
1755: xstart += maxwidth;
1756: } /* for (i = 0; i < Mdomains; ++i) */
1757: ystart += maxheight;
1758: } /* for (j = 0; j < Ndomains; ++j) */
1759: PetscFunctionReturn(PETSC_SUCCESS);
1760: }
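/*
  A hedged usage sketch for PCGASMCreateSubdomains2D() (all sizes are illustrative; pc is an
  existing PC of type PCGASM whose matrix comes from a 64 x 64 grid with 1 dof per node, and
  the requested overlap is 1 mesh line):

    PetscInt nsub;
    IS      *iis, *ois;

    PetscCall(PCGASMCreateSubdomains2D(pc, 64, 64, 4, 4, 1, 1, &nsub, &iis, &ois));
    PetscCall(PCGASMSetSubdomains(pc, nsub, iis, ois));
    PetscCall(PCGASMDestroySubdomains(nsub, &iis, &ois));
*/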
1762: /*@C
1763: PCGASMGetSubdomains - Gets the subdomains supported on this MPI rank
1764: for the `PCGASM` additive Schwarz preconditioner.
1766: Not Collective
1768: Input Parameter:
1769: . pc - the preconditioner context
1771: Output Parameters:
1772: + n - the number of subdomains for this MPI rank (default value = 1)
1773: . iis - the index sets that define the inner subdomains (without overlap) supported on this rank (can be `NULL`)
1774: - ois - the index sets that define the outer subdomains (with overlap) supported on this rank (can be `NULL`)
1776: Level: advanced
1778: Notes:
1779: The user is responsible for destroying the `IS`s and freeing the returned arrays; this can be done with
1780: `PCGASMDestroySubdomains()`
1782: The `IS` numbering is in the parallel, global numbering of the vector.
1784: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetOverlap()`, `PCGASMGetSubKSP()`, `PCGASMCreateSubdomains2D()`,
1785: `PCGASMSetSubdomains()`, `PCGASMGetSubmatrices()`, `PCGASMDestroySubdomains()`
1786: @*/
1787: PetscErrorCode PCGASMGetSubdomains(PC pc, PetscInt *n, IS *iis[], IS *ois[])
1788: {
1789: PC_GASM *osm;
1790: PetscBool match;
1791: PetscInt i;
1793: PetscFunctionBegin;
1795: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1796: PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1797: osm = (PC_GASM *)pc->data;
1798: if (n) *n = osm->n;
1799: if (iis) PetscCall(PetscMalloc1(osm->n, iis));
1800: if (ois) PetscCall(PetscMalloc1(osm->n, ois));
1801: if (iis || ois) {
1802: for (i = 0; i < osm->n; ++i) {
1803: if (iis) (*iis)[i] = osm->iis[i];
1804: if (ois) (*ois)[i] = osm->ois[i];
1805: }
1806: }
1807: PetscFunctionReturn(PETSC_SUCCESS);
1808: }
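/*
  A short sketch of querying the inner subdomains (hypothetical; pc is a set-up PCGASM).
  Following the note above, the returned array is released with PCGASMDestroySubdomains():

    PetscInt n, i, nidx, total = 0;
    IS      *iis;

    PetscCall(PCGASMGetSubdomains(pc, &n, &iis, NULL));
    for (i = 0; i < n; ++i) {
      PetscCall(ISGetLocalSize(iis[i], &nidx));
      total += nidx; // total number of inner-subdomain indices held on this rank
    }
    PetscCall(PCGASMDestroySubdomains(n, &iis, NULL));
*/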
1810: /*@C
1811: PCGASMGetSubmatrices - Gets the local submatrices (for this MPI rank
1812: only) for the `PCGASM` additive Schwarz preconditioner.
1814: Not Collective
1816: Input Parameter:
1817: . pc - the preconditioner context
1819: Output Parameters:
1820: + n - the number of matrices for this MPI rank (default value = 1)
1821: - mat - the matrices
1823: Level: advanced
1825: Note:
1826: Matrices returned by this routine have the same communicators as the index sets (`IS`)
1827: used to define subdomains in `PCGASMSetSubdomains()`
1829: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetOverlap()`, `PCGASMGetSubKSP()`,
1830: `PCGASMCreateSubdomains2D()`, `PCGASMSetSubdomains()`, `PCGASMGetSubdomains()`
1831: @*/
1832: PetscErrorCode PCGASMGetSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1833: {
1834: PC_GASM *osm;
1835: PetscBool match;
1837: PetscFunctionBegin;
1839: PetscAssertPointer(n, 2);
1840: if (mat) PetscAssertPointer(mat, 3);
1841: PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1842: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1843: PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1844: osm = (PC_GASM *)pc->data;
1845: if (n) *n = osm->n;
1846: if (mat) *mat = osm->pmat;
1847: PetscFunctionReturn(PETSC_SUCCESS);
1848: }
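/*
  A brief sketch (hypothetical names) of retrieving the subdomain matrices after setup;
  the matrices are owned by the PC and must not be destroyed by the caller:

    PetscInt n;
    Mat     *submat;

    PetscCall(PCSetUp(pc));
    PetscCall(PCGASMGetSubmatrices(pc, &n, &submat));
    // submat[0 .. n-1] can now be examined, e.g. with MatView(); do not destroy them here
*/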
1850: /*@
1851: PCGASMSetUseDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible for `PCGASM`
1853: Logically Collective
1855: Input Parameters:
1856: + pc - the preconditioner
1857: - flg - boolean indicating whether to use subdomains defined by the `DM`
1859: Options Database Keys:
1860: + -pc_gasm_dm_subdomains <bool> - use the `DM` to define the subdomains, if possible
1861: . -pc_gasm_overlap <ovl> - set the amount of subdomain overlap
1862: - -pc_gasm_total_subdomains <n> - set the total number of subdomains
1864: Level: intermediate
1866: Note:
1867: `PCGASMSetSubdomains()`, `PCGASMSetTotalSubdomains()`, and `PCGASMSetOverlap()` take precedence over `PCGASMSetUseDMSubdomains()`,
1868: so calling `PCGASMSetSubdomains()` with nontrivial subdomain `IS`s, or calling either `PCGASMSetTotalSubdomains()` or `PCGASMSetOverlap()`,
1869: automatically turns off the use of `DM`-defined subdomains.
1871: .seealso: [](ch_ksp), `PCGASM`, `PCGASMGetUseDMSubdomains()`, `PCGASMSetSubdomains()`, `PCGASMSetOverlap()`,
1872: `PCGASMCreateSubdomains2D()`
1873: @*/
1874: PetscErrorCode PCGASMSetUseDMSubdomains(PC pc, PetscBool flg)
1875: {
1876: PC_GASM *osm = (PC_GASM *)pc->data;
1877: PetscBool match;
1879: PetscFunctionBegin;
1882: PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1883: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1884: if (match) {
1885: if (!osm->user_subdomains && osm->N == PETSC_DETERMINE && osm->overlap < 0) osm->dm_subdomains = flg;
1886: }
1887: PetscFunctionReturn(PETSC_SUCCESS);
1888: }
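/*
  A hedged example of letting an attached DM define the subdomains (assumes ksp already
  has a DM, e.g. from a DMDA, and that this is called before PCSetUp()):

    PC pc;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCGASM));
    PetscCall(PCGASMSetUseDMSubdomains(pc, PETSC_TRUE));
*/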
1890: /*@
1891: PCGASMGetUseDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible with `PCGASM`
1893: Not Collective
1895: Input Parameter:
1896: . pc - the preconditioner
1898: Output Parameter:
1899: . flg - boolean indicating whether to use subdomains defined by the `DM`
1901: Level: intermediate
1903: .seealso: [](ch_ksp), `PCGASM`, `PCGASMSetUseDMSubdomains()`, `PCGASMSetOverlap()`,
1904: `PCGASMCreateSubdomains2D()`
1905: @*/
1906: PetscErrorCode PCGASMGetUseDMSubdomains(PC pc, PetscBool *flg)
1907: {
1908: PC_GASM *osm = (PC_GASM *)pc->data;
1909: PetscBool match;
1911: PetscFunctionBegin;
1913: PetscAssertPointer(flg, 2);
1914: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCGASM, &match));
1915: if (match) {
1916: if (flg) *flg = osm->dm_subdomains;
1917: }
1918: PetscFunctionReturn(PETSC_SUCCESS);
1919: }