Actual source code: asm.c

  1: /*
  2:   This file defines an additive Schwarz preconditioner for any Mat implementation.

  4:   Note that each processor may have any number of subdomains. But in order to
  5:   deal easily with VecScatter(), we treat each processor as if it has the
  6:   same number of subdomains.

  8:        n - total number of true subdomains on all processors
  9:        n_local_true - actual number of subdomains on this processor
 10:        n_local = maximum over all processors of n_local_true
 11: */

 13: #include <petsc/private/pcasmimpl.h>
 14: #include "petsc/private/matimpl.h"

 16: static PetscErrorCode PCView_ASM(PC pc, PetscViewer viewer)
 17: {
 18:   PC_ASM           *osm = (PC_ASM *)pc->data;
 19:   PetscMPIInt       rank;
 20:   PetscInt          i, bsz;
 21:   PetscBool         iascii, isstring;
 22:   PetscViewer       sviewer;
 23:   PetscViewerFormat format;
 24:   const char       *prefix;

 26:   PetscFunctionBegin;
 27:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
 28:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
 29:   if (iascii) {
 30:     char overlaps[256] = "user-defined overlap", blocks[256] = "total subdomain blocks not yet set";
 31:     if (osm->overlap >= 0) PetscCall(PetscSNPrintf(overlaps, sizeof(overlaps), "amount of overlap = %" PetscInt_FMT, osm->overlap));
 32:     if (osm->n > 0) PetscCall(PetscSNPrintf(blocks, sizeof(blocks), "total subdomain blocks = %" PetscInt_FMT, osm->n));
 33:     PetscCall(PetscViewerASCIIPrintf(viewer, "  %s, %s\n", blocks, overlaps));
 34:     PetscCall(PetscViewerASCIIPrintf(viewer, "  restriction/interpolation type - %s\n", PCASMTypes[osm->type]));
 35:     if (osm->dm_subdomains) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: using DM to define subdomains\n"));
 36:     if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "  Additive Schwarz: local solve composition type - %s\n", PCCompositeTypes[osm->loctype]));
 37:     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
 38:     PetscCall(PetscViewerGetFormat(viewer, &format));
 39:     if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
 40:       if (osm->ksp) {
 41:         PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for first block is in the following KSP and PC objects on rank 0:\n"));
 42:         PetscCall(PCGetOptionsPrefix(pc, &prefix));
 43:         PetscCall(PetscViewerASCIIPrintf(viewer, "  Use -%sksp_view ::ascii_info_detail to display information for all blocks\n", prefix ? prefix : ""));
 44:         PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 45:         if (rank == 0) {
 46:           PetscCall(PetscViewerASCIIPushTab(viewer));
 47:           PetscCall(KSPView(osm->ksp[0], sviewer));
 48:           PetscCall(PetscViewerASCIIPopTab(viewer));
 49:         }
 50:         PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 51:       }
 52:     } else {
 53:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
 54:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] number of local blocks = %" PetscInt_FMT "\n", (int)rank, osm->n_local_true));
 55:       PetscCall(PetscViewerFlush(viewer));
 56:       PetscCall(PetscViewerASCIIPrintf(viewer, "  Local solver information for each block is in the following KSP and PC objects:\n"));
 57:       PetscCall(PetscViewerASCIIPushTab(viewer));
 58:       PetscCall(PetscViewerASCIIPrintf(viewer, "- - - - - - - - - - - - - - - - - -\n"));
 59:       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 60:       for (i = 0; i < osm->n_local_true; i++) {
 61:         PetscCall(ISGetLocalSize(osm->is[i], &bsz));
 62:         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "[%d] local block number %" PetscInt_FMT ", size = %" PetscInt_FMT "\n", (int)rank, i, bsz));
 63:         PetscCall(KSPView(osm->ksp[i], sviewer));
 64:         PetscCall(PetscViewerASCIISynchronizedPrintf(sviewer, "- - - - - - - - - - - - - - - - - -\n"));
 65:       }
 66:       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 67:       PetscCall(PetscViewerASCIIPopTab(viewer));
 68:       PetscCall(PetscViewerFlush(viewer));
 69:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
 70:     }
 71:   } else if (isstring) {
 72:     PetscCall(PetscViewerStringSPrintf(viewer, " blocks=%" PetscInt_FMT ", overlap=%" PetscInt_FMT ", type=%s", osm->n, osm->overlap, PCASMTypes[osm->type]));
 73:     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 74:     if (osm->ksp) PetscCall(KSPView(osm->ksp[0], sviewer));
 75:     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
 76:   }
 77:   PetscFunctionReturn(PETSC_SUCCESS);
 78: }

 80: static PetscErrorCode PCASMPrintSubdomains(PC pc)
 81: {
 82:   PC_ASM         *osm = (PC_ASM *)pc->data;
 83:   const char     *prefix;
 84:   char            fname[PETSC_MAX_PATH_LEN + 1];
 85:   PetscViewer     viewer, sviewer;
 86:   char           *s;
 87:   PetscInt        i, j, nidx;
 88:   const PetscInt *idx;
 89:   PetscMPIInt     rank, size;

 91:   PetscFunctionBegin;
 92:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
 93:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
 94:   PetscCall(PCGetOptionsPrefix(pc, &prefix));
 95:   PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_asm_print_subdomains", fname, sizeof(fname), NULL));
 96:   if (fname[0] == 0) PetscCall(PetscStrncpy(fname, "stdout", sizeof(fname)));
 97:   PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc), fname, &viewer));
 98:   for (i = 0; i < osm->n_local; i++) {
 99:     if (i < osm->n_local_true) {
100:       PetscCall(ISGetLocalSize(osm->is[i], &nidx));
101:       PetscCall(ISGetIndices(osm->is[i], &idx));
102:       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
103: #define len 16 * (nidx + 1) + 512
104:       PetscCall(PetscMalloc1(len, &s));
105:       PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
106: #undef len
107:       PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " with overlap:\n", rank, size, i));
108:       for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
109:       PetscCall(ISRestoreIndices(osm->is[i], &idx));
110:       PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
111:       PetscCall(PetscViewerDestroy(&sviewer));
112:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
113:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
114:       PetscCall(PetscViewerFlush(viewer));
115:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
116:       PetscCall(PetscFree(s));
117:       if (osm->is_local) {
118:         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
119: #define len 16 * (nidx + 1) + 512
120:         PetscCall(PetscMalloc1(len, &s));
121:         PetscCall(PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer));
122: #undef len
123:         PetscCall(PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %" PetscInt_FMT " without overlap:\n", rank, size, i));
124:         PetscCall(ISGetLocalSize(osm->is_local[i], &nidx));
125:         PetscCall(ISGetIndices(osm->is_local[i], &idx));
126:         for (j = 0; j < nidx; j++) PetscCall(PetscViewerStringSPrintf(sviewer, "%" PetscInt_FMT " ", idx[j]));
127:         PetscCall(ISRestoreIndices(osm->is_local[i], &idx));
128:         PetscCall(PetscViewerStringSPrintf(sviewer, "\n"));
129:         PetscCall(PetscViewerDestroy(&sviewer));
130:         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
131:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%s", s));
132:         PetscCall(PetscViewerFlush(viewer));
133:         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
134:         PetscCall(PetscFree(s));
135:       }
136:     } else {
137:       /* Participate in collective viewer calls. */
138:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
139:       PetscCall(PetscViewerFlush(viewer));
140:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
141:       /* Assume either all ranks have is_local or none do. */
142:       if (osm->is_local) {
143:         PetscCall(PetscViewerASCIIPushSynchronized(viewer));
144:         PetscCall(PetscViewerFlush(viewer));
145:         PetscCall(PetscViewerASCIIPopSynchronized(viewer));
146:       }
147:     }
148:   }
149:   PetscCall(PetscViewerFlush(viewer));
150:   PetscCall(PetscViewerDestroy(&viewer));
151:   PetscFunctionReturn(PETSC_SUCCESS);
152: }

154: static PetscErrorCode PCSetUp_ASM(PC pc)
155: {
156:   PC_ASM     *osm = (PC_ASM *)pc->data;
157:   PetscBool   flg;
158:   PetscInt    i, m, m_local;
159:   MatReuse    scall = MAT_REUSE_MATRIX;
160:   IS          isl;
161:   KSP         ksp;
162:   PC          subpc;
163:   const char *prefix, *pprefix;
164:   Vec         vec;
165:   DM         *domain_dm = NULL;

167:   PetscFunctionBegin;
168:   if (!pc->setupcalled) {
169:     PetscInt m;

171:     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172:     if (osm->n_local_true == PETSC_DECIDE) {
173:       /* no subdomains given */
174:       /* try pc->dm first, if allowed */
175:       if (osm->dm_subdomains && pc->dm) {
176:         PetscInt num_domains, d;
177:         char   **domain_names;
178:         IS      *inner_domain_is, *outer_domain_is;
179:         PetscCall(DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm));
180:         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181:                               A future improvement of this code might allow one to use
182:                               DM-defined subdomains and also increase the overlap,
183:                               but that is not currently supported */
184:         if (num_domains) PetscCall(PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is));
185:         for (d = 0; d < num_domains; ++d) {
186:           if (domain_names) PetscCall(PetscFree(domain_names[d]));
187:           if (inner_domain_is) PetscCall(ISDestroy(&inner_domain_is[d]));
188:           if (outer_domain_is) PetscCall(ISDestroy(&outer_domain_is[d]));
189:         }
190:         PetscCall(PetscFree(domain_names));
191:         PetscCall(PetscFree(inner_domain_is));
192:         PetscCall(PetscFree(outer_domain_is));
193:       }
194:       if (osm->n_local_true == PETSC_DECIDE) {
195:         /* still no subdomains; use one subdomain per processor */
196:         osm->n_local_true = 1;
197:       }
198:     }
199:     { /* determine the global and max number of subdomains */
200:       struct {
201:         PetscInt max, sum;
202:       } inwork, outwork;
203:       PetscMPIInt size;

205:       inwork.max = osm->n_local_true;
206:       inwork.sum = osm->n_local_true;
207:       PetscCall(MPIU_Allreduce(&inwork, &outwork, 1, MPIU_2INT, MPIU_MAXSUM_OP, PetscObjectComm((PetscObject)pc)));
208:       osm->n_local = outwork.max;
209:       osm->n       = outwork.sum;

211:       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
212:       if (outwork.max == 1 && outwork.sum == size) {
 213:         /* osm->n_local_true = 1 on all processes; setting this option may enable the use of an optimized MatCreateSubMatrices() implementation */
214:         PetscCall(MatSetOption(pc->pmat, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
215:       }
216:     }
217:     if (!osm->is) { /* create the index sets */
218:       PetscCall(PCASMCreateSubdomains(pc->pmat, osm->n_local_true, &osm->is));
219:     }
220:     if (osm->n_local_true > 1 && !osm->is_local) {
221:       PetscCall(PetscMalloc1(osm->n_local_true, &osm->is_local));
222:       for (i = 0; i < osm->n_local_true; i++) {
223:         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224:           PetscCall(ISDuplicate(osm->is[i], &osm->is_local[i]));
225:           PetscCall(ISCopy(osm->is[i], osm->is_local[i]));
226:         } else {
227:           PetscCall(PetscObjectReference((PetscObject)osm->is[i]));
228:           osm->is_local[i] = osm->is[i];
229:         }
230:       }
231:     }
232:     PetscCall(PCGetOptionsPrefix(pc, &prefix));
233:     if (osm->overlap > 0) {
234:       /* Extend the "overlapping" regions by a number of steps */
235:       PetscCall(MatIncreaseOverlap(pc->pmat, osm->n_local_true, osm->is, osm->overlap));
236:     }
237:     if (osm->sort_indices) {
238:       for (i = 0; i < osm->n_local_true; i++) {
239:         PetscCall(ISSort(osm->is[i]));
240:         if (osm->is_local) PetscCall(ISSort(osm->is_local[i]));
241:       }
242:     }
243:     flg = PETSC_FALSE;
244:     PetscCall(PetscOptionsHasName(NULL, prefix, "-pc_asm_print_subdomains", &flg));
245:     if (flg) PetscCall(PCASMPrintSubdomains(pc));
246:     if (!osm->ksp) {
247:       /* Create the local solvers */
248:       PetscCall(PetscMalloc1(osm->n_local_true, &osm->ksp));
249:       if (domain_dm) PetscCall(PetscInfo(pc, "Setting up ASM subproblems using the embedded DM\n"));
250:       for (i = 0; i < osm->n_local_true; i++) {
251:         PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
252:         PetscCall(KSPSetNestLevel(ksp, pc->kspnestlevel));
253:         PetscCall(KSPSetErrorIfNotConverged(ksp, pc->erroriffailure));
254:         PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp, (PetscObject)pc, 1));
255:         PetscCall(KSPSetType(ksp, KSPPREONLY));
256:         PetscCall(KSPGetPC(ksp, &subpc));
257:         PetscCall(PCGetOptionsPrefix(pc, &prefix));
258:         PetscCall(KSPSetOptionsPrefix(ksp, prefix));
259:         PetscCall(KSPAppendOptionsPrefix(ksp, "sub_"));
260:         if (domain_dm) {
261:           PetscCall(KSPSetDM(ksp, domain_dm[i]));
262:           PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
263:           PetscCall(DMDestroy(&domain_dm[i]));
264:         }
265:         osm->ksp[i] = ksp;
266:       }
267:       if (domain_dm) PetscCall(PetscFree(domain_dm));
268:     }

270:     PetscCall(ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis));
271:     PetscCall(ISSortRemoveDups(osm->lis));
272:     PetscCall(ISGetLocalSize(osm->lis, &m));

274:     scall = MAT_INITIAL_MATRIX;
275:   } else {
276:     /*
277:        Destroy the blocks from the previous iteration
278:     */
279:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
280:       PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->pmat));
281:       scall = MAT_INITIAL_MATRIX;
282:     }
283:   }

285:   /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
286:   if (scall == MAT_REUSE_MATRIX && osm->sub_mat_type) {
287:     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
288:     scall = MAT_INITIAL_MATRIX;
289:   }

291:   /*
292:      Extract out the submatrices
293:   */
294:   PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, osm->is, scall, &osm->pmat));
295:   if (scall == MAT_INITIAL_MATRIX) {
296:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc->pmat, &pprefix));
297:     for (i = 0; i < osm->n_local_true; i++) PetscCall(PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i], pprefix));
298:   }

 300:   /* Convert the types of the submatrices (if need be) */
301:   if (osm->sub_mat_type) {
302:     for (i = 0; i < osm->n_local_true; i++) PetscCall(MatConvert(osm->pmat[i], osm->sub_mat_type, MAT_INPLACE_MATRIX, &(osm->pmat[i])));
303:   }

305:   if (!pc->setupcalled) {
306:     VecType vtype;

308:     /* Create the local work vectors (from the local matrices) and scatter contexts */
309:     PetscCall(MatCreateVecs(pc->pmat, &vec, NULL));

311:     PetscCheck(!osm->is_local || osm->n_local_true == 1 || (osm->type != PC_ASM_INTERPOLATE && osm->type != PC_ASM_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains() with more than a single subdomain");
312:     if (osm->is_local && osm->type != PC_ASM_BASIC && osm->loctype == PC_COMPOSITE_ADDITIVE) PetscCall(PetscMalloc1(osm->n_local_true, &osm->lprolongation));
313:     PetscCall(PetscMalloc1(osm->n_local_true, &osm->lrestriction));
314:     PetscCall(PetscMalloc1(osm->n_local_true, &osm->x));
315:     PetscCall(PetscMalloc1(osm->n_local_true, &osm->y));

317:     PetscCall(ISGetLocalSize(osm->lis, &m));
318:     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
319:     PetscCall(MatGetVecType(osm->pmat[0], &vtype));
320:     PetscCall(VecCreate(PETSC_COMM_SELF, &osm->lx));
321:     PetscCall(VecSetSizes(osm->lx, m, m));
322:     PetscCall(VecSetType(osm->lx, vtype));
323:     PetscCall(VecDuplicate(osm->lx, &osm->ly));
324:     PetscCall(VecScatterCreate(vec, osm->lis, osm->lx, isl, &osm->restriction));
325:     PetscCall(ISDestroy(&isl));

327:     for (i = 0; i < osm->n_local_true; ++i) {
328:       ISLocalToGlobalMapping ltog;
329:       IS                     isll;
330:       const PetscInt        *idx_is;
331:       PetscInt              *idx_lis, nout;

333:       PetscCall(ISGetLocalSize(osm->is[i], &m));
334:       PetscCall(MatCreateVecs(osm->pmat[i], &osm->x[i], NULL));
335:       PetscCall(VecDuplicate(osm->x[i], &osm->y[i]));

337:       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
338:       PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
339:       PetscCall(ISGetLocalSize(osm->is[i], &m));
340:       PetscCall(ISGetIndices(osm->is[i], &idx_is));
341:       PetscCall(PetscMalloc1(m, &idx_lis));
342:       PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m, idx_is, &nout, idx_lis));
343:       PetscCheck(nout == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is not a subset of lis");
344:       PetscCall(ISRestoreIndices(osm->is[i], &idx_is));
345:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx_lis, PETSC_OWN_POINTER, &isll));
346:       PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
347:       PetscCall(ISCreateStride(PETSC_COMM_SELF, m, 0, 1, &isl));
348:       PetscCall(VecScatterCreate(osm->ly, isll, osm->y[i], isl, &osm->lrestriction[i]));
349:       PetscCall(ISDestroy(&isll));
350:       PetscCall(ISDestroy(&isl));
 351:       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
352:         ISLocalToGlobalMapping ltog;
353:         IS                     isll, isll_local;
354:         const PetscInt        *idx_local;
355:         PetscInt              *idx1, *idx2, nout;

357:         PetscCall(ISGetLocalSize(osm->is_local[i], &m_local));
358:         PetscCall(ISGetIndices(osm->is_local[i], &idx_local));

360:         PetscCall(ISLocalToGlobalMappingCreateIS(osm->is[i], &ltog));
361:         PetscCall(PetscMalloc1(m_local, &idx1));
362:         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx1));
363:         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
364:         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of is");
365:         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx1, PETSC_OWN_POINTER, &isll));

367:         PetscCall(ISLocalToGlobalMappingCreateIS(osm->lis, &ltog));
368:         PetscCall(PetscMalloc1(m_local, &idx2));
369:         PetscCall(ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, m_local, idx_local, &nout, idx2));
370:         PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
371:         PetscCheck(nout == m_local, PETSC_COMM_SELF, PETSC_ERR_PLIB, "is_local not a subset of lis");
372:         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m_local, idx2, PETSC_OWN_POINTER, &isll_local));

374:         PetscCall(ISRestoreIndices(osm->is_local[i], &idx_local));
375:         PetscCall(VecScatterCreate(osm->y[i], isll, osm->ly, isll_local, &osm->lprolongation[i]));

377:         PetscCall(ISDestroy(&isll));
378:         PetscCall(ISDestroy(&isll_local));
379:       }
380:     }
381:     PetscCall(VecDestroy(&vec));
382:   }

384:   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
385:     IS      *cis;
386:     PetscInt c;

388:     PetscCall(PetscMalloc1(osm->n_local_true, &cis));
389:     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
390:     PetscCall(MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats));
391:     PetscCall(PetscFree(cis));
392:   }

394:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
395:      different boundary conditions for the submatrices than for the global problem) */
396:   PetscCall(PCModifySubMatrices(pc, osm->n_local_true, osm->is, osm->is, osm->pmat, pc->modifysubmatricesP));

398:   /*
399:      Loop over subdomains putting them into local ksp
400:   */
401:   PetscCall(KSPGetOptionsPrefix(osm->ksp[0], &prefix));
402:   for (i = 0; i < osm->n_local_true; i++) {
403:     PetscCall(KSPSetOperators(osm->ksp[i], osm->pmat[i], osm->pmat[i]));
404:     PetscCall(MatSetOptionsPrefix(osm->pmat[i], prefix));
405:     if (!pc->setupcalled) PetscCall(KSPSetFromOptions(osm->ksp[i]));
406:   }
407:   PetscFunctionReturn(PETSC_SUCCESS);
408: }

410: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
411: {
412:   PC_ASM            *osm = (PC_ASM *)pc->data;
413:   PetscInt           i;
414:   KSPConvergedReason reason;

416:   PetscFunctionBegin;
417:   for (i = 0; i < osm->n_local_true; i++) {
418:     PetscCall(KSPSetUp(osm->ksp[i]));
419:     PetscCall(KSPGetConvergedReason(osm->ksp[i], &reason));
420:     if (reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR;
421:   }
422:   PetscFunctionReturn(PETSC_SUCCESS);
423: }

425: static PetscErrorCode PCApply_ASM(PC pc, Vec x, Vec y)
426: {
427:   PC_ASM     *osm = (PC_ASM *)pc->data;
428:   PetscInt    i, n_local_true = osm->n_local_true;
429:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

431:   PetscFunctionBegin;
432:   /*
433:      support for limiting the restriction or interpolation to only local
434:      subdomain values (leaving the other values 0).
435:   */
436:   if (!(osm->type & PC_ASM_RESTRICT)) {
437:     forward = SCATTER_FORWARD_LOCAL;
438:     /* have to zero the work RHS since scatter may leave some slots empty */
439:     PetscCall(VecSet(osm->lx, 0.0));
440:   }
441:   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

443:   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
444:   /* zero the global and the local solutions */
445:   PetscCall(VecSet(y, 0.0));
446:   PetscCall(VecSet(osm->ly, 0.0));

448:   /* copy the global RHS to local RHS including the ghost nodes */
449:   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
450:   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

452:   /* restrict local RHS to the overlapping 0-block RHS */
453:   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
454:   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

456:   /* do the local solves */
457:   for (i = 0; i < n_local_true; ++i) {
458:     /* solve the overlapping i-block */
459:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
460:     PetscCall(KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]));
461:     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
462:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

464:     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
465:       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
466:       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
467:     } else { /* interpolate the overlapping i-block solution to the local solution */
468:       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
469:       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
470:     }

472:     if (i < n_local_true - 1) {
473:       /* restrict local RHS to the overlapping (i+1)-block RHS */
474:       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
475:       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));

477:       if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
478:         /* update the overlapping (i+1)-block RHS using the current local solution */
479:         PetscCall(MatMult(osm->lmats[i + 1], osm->ly, osm->y[i + 1]));
480:         PetscCall(VecAXPBY(osm->x[i + 1], -1., 1., osm->y[i + 1]));
481:       }
482:     }
483:   }
484:   /* add the local solution to the global solution including the ghost nodes */
485:   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
486:   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
487:   PetscFunctionReturn(PETSC_SUCCESS);
488: }

490: static PetscErrorCode PCMatApply_ASM(PC pc, Mat X, Mat Y)
491: {
492:   PC_ASM     *osm = (PC_ASM *)pc->data;
493:   Mat         Z, W;
494:   Vec         x;
495:   PetscInt    i, m, N;
496:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

498:   PetscFunctionBegin;
499:   PetscCheck(osm->n_local_true <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not yet implemented");
500:   /*
501:      support for limiting the restriction or interpolation to only local
502:      subdomain values (leaving the other values 0).
503:   */
504:   if (!(osm->type & PC_ASM_RESTRICT)) {
505:     forward = SCATTER_FORWARD_LOCAL;
506:     /* have to zero the work RHS since scatter may leave some slots empty */
507:     PetscCall(VecSet(osm->lx, 0.0));
508:   }
509:   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
510:   PetscCall(VecGetLocalSize(osm->x[0], &m));
511:   PetscCall(MatGetSize(X, NULL, &N));
512:   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z));

514:   PetscCheck(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
515:   /* zero the global and the local solutions */
516:   PetscCall(MatZeroEntries(Y));
517:   PetscCall(VecSet(osm->ly, 0.0));

519:   for (i = 0; i < N; ++i) {
520:     PetscCall(MatDenseGetColumnVecRead(X, i, &x));
521:     /* copy the global RHS to local RHS including the ghost nodes */
522:     PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
523:     PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
524:     PetscCall(MatDenseRestoreColumnVecRead(X, i, &x));

526:     PetscCall(MatDenseGetColumnVecWrite(Z, i, &x));
527:     /* restrict local RHS to the overlapping 0-block RHS */
528:     PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
529:     PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward));
530:     PetscCall(MatDenseRestoreColumnVecWrite(Z, i, &x));
531:   }
532:   PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W));
533:   /* solve the overlapping 0-block */
534:   PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
535:   PetscCall(KSPMatSolve(osm->ksp[0], Z, W));
536:   PetscCall(KSPCheckSolve(osm->ksp[0], pc, NULL));
537:   PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0));
538:   PetscCall(MatDestroy(&Z));

540:   for (i = 0; i < N; ++i) {
541:     PetscCall(VecSet(osm->ly, 0.0));
542:     PetscCall(MatDenseGetColumnVecRead(W, i, &x));
543:     if (osm->lprolongation && osm->type != PC_ASM_INTERPOLATE) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
544:       PetscCall(VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
545:       PetscCall(VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward));
546:     } else { /* interpolate the overlapping 0-block solution to the local solution */
547:       PetscCall(VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
548:       PetscCall(VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse));
549:     }
550:     PetscCall(MatDenseRestoreColumnVecRead(W, i, &x));

552:     PetscCall(MatDenseGetColumnVecWrite(Y, i, &x));
553:     /* add the local solution to the global solution including the ghost nodes */
554:     PetscCall(VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
555:     PetscCall(VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse));
556:     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &x));
557:   }
558:   PetscCall(MatDestroy(&W));
559:   PetscFunctionReturn(PETSC_SUCCESS);
560: }

562: static PetscErrorCode PCApplyTranspose_ASM(PC pc, Vec x, Vec y)
563: {
564:   PC_ASM     *osm = (PC_ASM *)pc->data;
565:   PetscInt    i, n_local_true = osm->n_local_true;
566:   ScatterMode forward = SCATTER_FORWARD, reverse = SCATTER_REVERSE;

568:   PetscFunctionBegin;
569:   /*
570:      Support for limiting the restriction or interpolation to only local
571:      subdomain values (leaving the other values 0).

573:      Note: these are reversed from the PCApply_ASM() because we are applying the
574:      transpose of the three terms
575:   */

577:   if (!(osm->type & PC_ASM_INTERPOLATE)) {
578:     forward = SCATTER_FORWARD_LOCAL;
579:     /* have to zero the work RHS since scatter may leave some slots empty */
580:     PetscCall(VecSet(osm->lx, 0.0));
581:   }
582:   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

584:   /* zero the global and the local solutions */
585:   PetscCall(VecSet(y, 0.0));
586:   PetscCall(VecSet(osm->ly, 0.0));

588:   /* Copy the global RHS to local RHS including the ghost nodes */
589:   PetscCall(VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward));
590:   PetscCall(VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward));

592:   /* Restrict local RHS to the overlapping 0-block RHS */
593:   PetscCall(VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));
594:   PetscCall(VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward));

596:   /* do the local solves */
597:   for (i = 0; i < n_local_true; ++i) {
598:     /* solve the overlapping i-block */
599:     PetscCall(PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));
600:     PetscCall(KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]));
601:     PetscCall(KSPCheckSolve(osm->ksp[i], pc, osm->y[i]));
602:     PetscCall(PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0));

604:     if (osm->lprolongation && osm->type != PC_ASM_RESTRICT) { /* interpolate the non-overlapping i-block solution to the local solution */
605:       PetscCall(VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
606:       PetscCall(VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward));
607:     } else { /* interpolate the overlapping i-block solution to the local solution */
608:       PetscCall(VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
609:       PetscCall(VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse));
610:     }

612:     if (i < n_local_true - 1) {
613:       /* Restrict local RHS to the overlapping (i+1)-block RHS */
614:       PetscCall(VecScatterBegin(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
615:       PetscCall(VecScatterEnd(osm->lrestriction[i + 1], osm->lx, osm->x[i + 1], INSERT_VALUES, forward));
616:     }
617:   }
618:   /* Add the local solution to the global solution including the ghost nodes */
619:   PetscCall(VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
620:   PetscCall(VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse));
621:   PetscFunctionReturn(PETSC_SUCCESS);
622: }

624: static PetscErrorCode PCReset_ASM(PC pc)
625: {
626:   PC_ASM  *osm = (PC_ASM *)pc->data;
627:   PetscInt i;

629:   PetscFunctionBegin;
630:   if (osm->ksp) {
631:     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPReset(osm->ksp[i]));
632:   }
633:   if (osm->pmat) {
634:     if (osm->n_local_true > 0) PetscCall(MatDestroySubMatrices(osm->n_local_true, &osm->pmat));
635:   }
636:   if (osm->lrestriction) {
637:     PetscCall(VecScatterDestroy(&osm->restriction));
638:     for (i = 0; i < osm->n_local_true; i++) {
639:       PetscCall(VecScatterDestroy(&osm->lrestriction[i]));
640:       if (osm->lprolongation) PetscCall(VecScatterDestroy(&osm->lprolongation[i]));
641:       PetscCall(VecDestroy(&osm->x[i]));
642:       PetscCall(VecDestroy(&osm->y[i]));
643:     }
644:     PetscCall(PetscFree(osm->lrestriction));
645:     if (osm->lprolongation) PetscCall(PetscFree(osm->lprolongation));
646:     PetscCall(PetscFree(osm->x));
647:     PetscCall(PetscFree(osm->y));
648:   }
649:   PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));
650:   PetscCall(ISDestroy(&osm->lis));
651:   PetscCall(VecDestroy(&osm->lx));
652:   PetscCall(VecDestroy(&osm->ly));
653:   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) PetscCall(MatDestroyMatrices(osm->n_local_true, &osm->lmats));

655:   PetscCall(PetscFree(osm->sub_mat_type));

657:   osm->is       = NULL;
658:   osm->is_local = NULL;
659:   PetscFunctionReturn(PETSC_SUCCESS);
660: }

662: static PetscErrorCode PCDestroy_ASM(PC pc)
663: {
664:   PC_ASM  *osm = (PC_ASM *)pc->data;
665:   PetscInt i;

667:   PetscFunctionBegin;
668:   PetscCall(PCReset_ASM(pc));
669:   if (osm->ksp) {
670:     for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
671:     PetscCall(PetscFree(osm->ksp));
672:   }
673:   PetscCall(PetscFree(pc->data));

675:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", NULL));
676:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", NULL));
677:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", NULL));
678:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", NULL));
679:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", NULL));
680:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", NULL));
681:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", NULL));
682:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", NULL));
683:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", NULL));
684:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", NULL));
685:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", NULL));
686:   PetscFunctionReturn(PETSC_SUCCESS);
687: }

689: static PetscErrorCode PCSetFromOptions_ASM(PC pc, PetscOptionItems *PetscOptionsObject)
690: {
691:   PC_ASM         *osm = (PC_ASM *)pc->data;
692:   PetscInt        blocks, ovl;
693:   PetscBool       flg;
694:   PCASMType       asmtype;
695:   PCCompositeType loctype;
696:   char            sub_mat_type[256];

698:   PetscFunctionBegin;
699:   PetscOptionsHeadBegin(PetscOptionsObject, "Additive Schwarz options");
700:   PetscCall(PetscOptionsBool("-pc_asm_dm_subdomains", "Use DMCreateDomainDecomposition() to define subdomains", "PCASMSetDMSubdomains", osm->dm_subdomains, &osm->dm_subdomains, &flg));
701:   PetscCall(PetscOptionsInt("-pc_asm_blocks", "Number of subdomains", "PCASMSetTotalSubdomains", osm->n, &blocks, &flg));
702:   if (flg) {
703:     PetscCall(PCASMSetTotalSubdomains(pc, blocks, NULL, NULL));
704:     osm->dm_subdomains = PETSC_FALSE;
705:   }
706:   PetscCall(PetscOptionsInt("-pc_asm_local_blocks", "Number of local subdomains", "PCASMSetLocalSubdomains", osm->n_local_true, &blocks, &flg));
707:   if (flg) {
708:     PetscCall(PCASMSetLocalSubdomains(pc, blocks, NULL, NULL));
709:     osm->dm_subdomains = PETSC_FALSE;
710:   }
711:   PetscCall(PetscOptionsInt("-pc_asm_overlap", "Number of grid points overlap", "PCASMSetOverlap", osm->overlap, &ovl, &flg));
712:   if (flg) {
713:     PetscCall(PCASMSetOverlap(pc, ovl));
714:     osm->dm_subdomains = PETSC_FALSE;
715:   }
716:   flg = PETSC_FALSE;
717:   PetscCall(PetscOptionsEnum("-pc_asm_type", "Type of restriction/extension", "PCASMSetType", PCASMTypes, (PetscEnum)osm->type, (PetscEnum *)&asmtype, &flg));
718:   if (flg) PetscCall(PCASMSetType(pc, asmtype));
719:   flg = PETSC_FALSE;
720:   PetscCall(PetscOptionsEnum("-pc_asm_local_type", "Type of local solver composition", "PCASMSetLocalType", PCCompositeTypes, (PetscEnum)osm->loctype, (PetscEnum *)&loctype, &flg));
721:   if (flg) PetscCall(PCASMSetLocalType(pc, loctype));
722:   PetscCall(PetscOptionsFList("-pc_asm_sub_mat_type", "Subsolve Matrix Type", "PCASMSetSubMatType", MatList, NULL, sub_mat_type, 256, &flg));
723:   if (flg) PetscCall(PCASMSetSubMatType(pc, sub_mat_type));
724:   PetscOptionsHeadEnd();
725:   PetscFunctionReturn(PETSC_SUCCESS);
726: }

728: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc, PetscInt n, IS is[], IS is_local[])
729: {
730:   PC_ASM  *osm = (PC_ASM *)pc->data;
731:   PetscInt i;

733:   PetscFunctionBegin;
734:   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Each process must have 1 or more blocks, n = %" PetscInt_FMT, n);
735:   PetscCheck(!pc->setupcalled || (n == osm->n_local_true && !is), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

737:   if (!pc->setupcalled) {
738:     if (is) {
739:       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is[i]));
740:     }
741:     if (is_local) {
742:       for (i = 0; i < n; i++) PetscCall(PetscObjectReference((PetscObject)is_local[i]));
743:     }
744:     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

746:     if (osm->ksp && osm->n_local_true != n) {
747:       for (i = 0; i < osm->n_local_true; i++) PetscCall(KSPDestroy(&osm->ksp[i]));
748:       PetscCall(PetscFree(osm->ksp));
749:     }

751:     osm->n_local_true = n;
752:     osm->is           = NULL;
753:     osm->is_local     = NULL;
754:     if (is) {
755:       PetscCall(PetscMalloc1(n, &osm->is));
756:       for (i = 0; i < n; i++) osm->is[i] = is[i];
757:       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
758:       osm->overlap = -1;
759:     }
760:     if (is_local) {
761:       PetscCall(PetscMalloc1(n, &osm->is_local));
762:       for (i = 0; i < n; i++) osm->is_local[i] = is_local[i];
763:       if (!is) {
764:         PetscCall(PetscMalloc1(osm->n_local_true, &osm->is));
765:         for (i = 0; i < osm->n_local_true; i++) {
766:           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
767:             PetscCall(ISDuplicate(osm->is_local[i], &osm->is[i]));
768:             PetscCall(ISCopy(osm->is_local[i], osm->is[i]));
769:           } else {
770:             PetscCall(PetscObjectReference((PetscObject)osm->is_local[i]));
771:             osm->is[i] = osm->is_local[i];
772:           }
773:         }
774:       }
775:     }
776:   }
777:   PetscFunctionReturn(PETSC_SUCCESS);
778: }

780: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc, PetscInt N, IS *is, IS *is_local)
781: {
782:   PC_ASM     *osm = (PC_ASM *)pc->data;
783:   PetscMPIInt rank, size;
784:   PetscInt    n;

786:   PetscFunctionBegin;
787:   PetscCheck(N >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of total blocks must be > 0, N = %" PetscInt_FMT, N);
 788:   PetscCheck(!is && !is_local, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

790:   /*
791:      Split the subdomains equally among all processors
792:   */
793:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
794:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
795:   n = N / size + ((N % size) > rank);
796:   PetscCheck(n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Process %d must have at least one block: total processors %d total blocks %" PetscInt_FMT, (int)rank, (int)size, N);
797:   PetscCheck(!pc->setupcalled || n == osm->n_local_true, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PCASMSetTotalSubdomains() should be called before PCSetUp().");
798:   if (!pc->setupcalled) {
799:     PetscCall(PCASMDestroySubdomains(osm->n_local_true, osm->is, osm->is_local));

801:     osm->n_local_true = n;
802:     osm->is           = NULL;
803:     osm->is_local     = NULL;
804:   }
805:   PetscFunctionReturn(PETSC_SUCCESS);
806: }

808: static PetscErrorCode PCASMSetOverlap_ASM(PC pc, PetscInt ovl)
809: {
810:   PC_ASM *osm = (PC_ASM *)pc->data;

812:   PetscFunctionBegin;
813:   PetscCheck(ovl >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap value requested");
814:   PetscCheck(!pc->setupcalled || ovl == osm->overlap, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PCASMSetOverlap() should be called before PCSetUp().");
815:   if (!pc->setupcalled) osm->overlap = ovl;
816:   PetscFunctionReturn(PETSC_SUCCESS);
817: }

819: static PetscErrorCode PCASMSetType_ASM(PC pc, PCASMType type)
820: {
821:   PC_ASM *osm = (PC_ASM *)pc->data;

823:   PetscFunctionBegin;
824:   osm->type     = type;
825:   osm->type_set = PETSC_TRUE;
826:   PetscFunctionReturn(PETSC_SUCCESS);
827: }

829: static PetscErrorCode PCASMGetType_ASM(PC pc, PCASMType *type)
830: {
831:   PC_ASM *osm = (PC_ASM *)pc->data;

833:   PetscFunctionBegin;
834:   *type = osm->type;
835:   PetscFunctionReturn(PETSC_SUCCESS);
836: }

838: static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
839: {
840:   PC_ASM *osm = (PC_ASM *)pc->data;

842:   PetscFunctionBegin;
843:   PetscCheck(type == PC_COMPOSITE_ADDITIVE || type == PC_COMPOSITE_MULTIPLICATIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Only supports additive or multiplicative as the local type");
844:   osm->loctype = type;
845:   PetscFunctionReturn(PETSC_SUCCESS);
846: }

848: static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
849: {
850:   PC_ASM *osm = (PC_ASM *)pc->data;

852:   PetscFunctionBegin;
853:   *type = osm->loctype;
854:   PetscFunctionReturn(PETSC_SUCCESS);
855: }

857: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc, PetscBool doSort)
858: {
859:   PC_ASM *osm = (PC_ASM *)pc->data;

861:   PetscFunctionBegin;
862:   osm->sort_indices = doSort;
863:   PetscFunctionReturn(PETSC_SUCCESS);
864: }

866: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc, PetscInt *n_local, PetscInt *first_local, KSP **ksp)
867: {
868:   PC_ASM *osm = (PC_ASM *)pc->data;

870:   PetscFunctionBegin;
871:   PetscCheck(osm->n_local_true >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

873:   if (n_local) *n_local = osm->n_local_true;
874:   if (first_local) {
875:     PetscCallMPI(MPI_Scan(&osm->n_local_true, first_local, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
876:     *first_local -= osm->n_local_true;
877:   }
878:   if (ksp) *ksp = osm->ksp;
879:   PetscFunctionReturn(PETSC_SUCCESS);
880: }

882: static PetscErrorCode PCASMGetSubMatType_ASM(PC pc, MatType *sub_mat_type)
883: {
884:   PC_ASM *osm = (PC_ASM *)pc->data;

886:   PetscFunctionBegin;
888:   PetscAssertPointer(sub_mat_type, 2);
889:   *sub_mat_type = osm->sub_mat_type;
890:   PetscFunctionReturn(PETSC_SUCCESS);
891: }

893: static PetscErrorCode PCASMSetSubMatType_ASM(PC pc, MatType sub_mat_type)
894: {
895:   PC_ASM *osm = (PC_ASM *)pc->data;

897:   PetscFunctionBegin;
899:   PetscCall(PetscFree(osm->sub_mat_type));
900:   PetscCall(PetscStrallocpy(sub_mat_type, (char **)&osm->sub_mat_type));
901:   PetscFunctionReturn(PETSC_SUCCESS);
902: }

904: /*@C
905:   PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner `PCASM`.

907:   Collective

909:   Input Parameters:
910: + pc       - the preconditioner context
911: . n        - the number of subdomains for this processor (default value = 1)
912: . is       - the index set that defines the subdomains for this processor
913:          (or `NULL` for PETSc to determine subdomains)
 914: - is_local - the index sets that define the local part of the subdomains for this processor, not used unless `PCASMType` is `PC_ASM_RESTRICT`
915:          (or `NULL` to not provide these)

917:   Options Database Key:
918: . -pc_asm_local_blocks <blks> - Sets number of local blocks

920:   Level: advanced

922:   Notes:
923:   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local

925:   By default the `PCASM` preconditioner uses 1 block per processor.

927:   Use `PCASMSetTotalSubdomains()` to set the subdomains for all processors.

929:   If is_local is provided and `PCASMType` is `PC_ASM_RESTRICT` then the solution only over the is_local region is interpolated
930:   back to form the global solution (this is the standard restricted additive Schwarz method)

 932:   If is_local is provided and `PCASMType` is `PC_ASM_INTERPOLATE` or `PC_ASM_NONE` then an error is generated since there is
933:   no code to handle that case.
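
  Example Usage:
  A minimal sketch that splits this process's rows into two contiguous subdomains; the splitting and the
  matrix A (the matrix the preconditioner is built from) are illustrative assumptions, and pc is assumed
  to already have type `PCASM`.
.vb
  Mat      A;
  IS       is[2];
  PetscInt rstart, rend, mid;

  MatGetOwnershipRange(A, &rstart, &rend);
  mid = rstart + (rend - rstart) / 2;
  ISCreateStride(PETSC_COMM_SELF, mid - rstart, rstart, 1, &is[0]);
  ISCreateStride(PETSC_COMM_SELF, rend - mid, mid, 1, &is[1]);
  PCASMSetLocalSubdomains(pc, 2, is, NULL);
  ISDestroy(&is[0]); /* the PC keeps its own references to the index sets */
  ISDestroy(&is[1]);
.ve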

935: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
936:           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `PCASMType`, `PCASMSetType()`, `PCGASM`
937: @*/
938: PetscErrorCode PCASMSetLocalSubdomains(PC pc, PetscInt n, IS is[], IS is_local[])
939: {
940:   PetscFunctionBegin;
942:   PetscTryMethod(pc, "PCASMSetLocalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, n, is, is_local));
943:   PetscFunctionReturn(PETSC_SUCCESS);
944: }

946: /*@C
947:   PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
948:   additive Schwarz preconditioner, `PCASM`.

950:   Collective, all MPI ranks must pass in the same array of `IS`

952:   Input Parameters:
953: + pc       - the preconditioner context
954: . N        - the number of subdomains for all processors
955: . is       - the index sets that define the subdomains for all processors
956:          (or `NULL` to ask PETSc to determine the subdomains)
957: - is_local - the index sets that define the local part of the subdomains for this processor
958:          (or `NULL` to not provide this information)

960:   Options Database Key:
961: . -pc_asm_blocks <blks> - Sets total blocks

963:   Level: advanced

965:   Notes:
 966:   Currently you cannot use this to set the actual subdomains with the arguments is or is_local.

968:   By default the `PCASM` preconditioner uses 1 block per processor.

970:   These index sets cannot be destroyed until after completion of the
971:   linear solves for which the `PCASM` preconditioner is being used.

973:   Use `PCASMSetLocalSubdomains()` to set local subdomains.

975:   The `IS` numbering is in the parallel, global numbering of the vector for both is and is_local
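
  Example Usage:
  A minimal sketch; the total of 16 blocks is illustrative. Since the index sets cannot yet be supplied
  through this routine, both IS arguments are NULL and PETSc generates the subdomains.
.vb
  PCASMSetTotalSubdomains(pc, 16, NULL, NULL);
.ve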

977: .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
978:           `PCASMCreateSubdomains2D()`, `PCGASM`
979: @*/
980: PetscErrorCode PCASMSetTotalSubdomains(PC pc, PetscInt N, IS is[], IS is_local[])
981: {
982:   PetscFunctionBegin;
984:   PetscTryMethod(pc, "PCASMSetTotalSubdomains_C", (PC, PetscInt, IS[], IS[]), (pc, N, is, is_local));
985:   PetscFunctionReturn(PETSC_SUCCESS);
986: }

988: /*@
989:   PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
990:   additive Schwarz preconditioner, `PCASM`.

992:   Logically Collective

994:   Input Parameters:
995: + pc  - the preconditioner context
996: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

998:   Options Database Key:
999: . -pc_asm_overlap <ovl> - Sets overlap

1001:   Level: intermediate

1003:   Notes:
1004:   By default the `PCASM` preconditioner uses 1 block per processor.  To use
1005:   multiple blocks per processor, see `PCASMSetTotalSubdomains()` and
1006:   `PCASMSetLocalSubdomains()` (and the option -pc_asm_blocks <blks>).

1008:   The overlap defaults to 1, so if one desires that no additional
1009:   overlap be computed beyond what may have been set with a call to
1010:   `PCASMSetTotalSubdomains()` or `PCASMSetLocalSubdomains()`, then ovl
1011:   must be set to 0.  In particular, if one does not explicitly set
1012:   the subdomains in an application code, then all overlap would be computed
1013:   internally by PETSc, and using an overlap of 0 would result in a `PCASM`
1014:   variant that is equivalent to the block Jacobi preconditioner.

1016:   The default algorithm used by PETSc to increase overlap is fast but not scalable;
1017:   use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.

1019:   One can define initial index sets with any overlap via
1020:   `PCASMSetLocalSubdomains()`; the routine
1021:   `PCASMSetOverlap()` merely allows PETSc to extend that overlap further
1022:   if desired.
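
  Example Usage:
  A minimal sketch; the overlap of 2 is illustrative. Passing 0 keeps exactly the subdomains that were
  provided (or, with PETSc-generated subdomains, gives the block Jacobi-like variant described above).
.vb
  PCASMSetOverlap(pc, 2);
.ve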

1024: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1025:           `PCASMCreateSubdomains2D()`, `PCASMGetLocalSubdomains()`, `MatIncreaseOverlap()`, `PCGASM`
1026: @*/
1027: PetscErrorCode PCASMSetOverlap(PC pc, PetscInt ovl)
1028: {
1029:   PetscFunctionBegin;
1032:   PetscTryMethod(pc, "PCASMSetOverlap_C", (PC, PetscInt), (pc, ovl));
1033:   PetscFunctionReturn(PETSC_SUCCESS);
1034: }

1036: /*@
1037:   PCASMSetType - Sets the type of restriction and interpolation used
1038:   for local problems in the additive Schwarz method, `PCASM`.

1040:   Logically Collective

1042:   Input Parameters:
1043: + pc   - the preconditioner context
1044: - type - variant of `PCASM`, one of
1045: .vb
1046:       PC_ASM_BASIC       - full interpolation and restriction
1047:       PC_ASM_RESTRICT    - full restriction, local processor interpolation (default)
1048:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1049:       PC_ASM_NONE        - local processor restriction and interpolation
1050: .ve

1052:   Options Database Key:
1053: . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`

1055:   Level: intermediate

1057:   Note:
1058:   If the is_local arguments are passed to `PCASMSetLocalSubdomains()`, they are used when `PC_ASM_RESTRICT` has been selected
1059:   to limit the local processor interpolation
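
  Example Usage:
  A minimal sketch selecting the basic (non-restricted) variant in place of the default `PC_ASM_RESTRICT`.
.vb
  PCASMSetType(pc, PC_ASM_BASIC);
.ve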

1061: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1062:           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetLocalType()`, `PCASMGetLocalType()`, `PCGASM`
1063: @*/
1064: PetscErrorCode PCASMSetType(PC pc, PCASMType type)
1065: {
1066:   PetscFunctionBegin;
1069:   PetscTryMethod(pc, "PCASMSetType_C", (PC, PCASMType), (pc, type));
1070:   PetscFunctionReturn(PETSC_SUCCESS);
1071: }

1073: /*@
1074:   PCASMGetType - Gets the type of restriction and interpolation used
1075:   for local problems in the additive Schwarz method, `PCASM`.

1077:   Logically Collective

1079:   Input Parameter:
1080: . pc - the preconditioner context

1082:   Output Parameter:
1083: . type - variant of `PCASM`, one of
1084: .vb
1085:       PC_ASM_BASIC       - full interpolation and restriction
1086:       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1087:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1088:       PC_ASM_NONE        - local processor restriction and interpolation
1089: .ve

1091:   Options Database Key:
1092: . -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASM` type

1094:   Level: intermediate

1096: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`, `PCGASM`,
1097:           `PCASMCreateSubdomains2D()`, `PCASMType`, `PCASMSetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`
1098: @*/
1099: PetscErrorCode PCASMGetType(PC pc, PCASMType *type)
1100: {
1101:   PetscFunctionBegin;
1103:   PetscUseMethod(pc, "PCASMGetType_C", (PC, PCASMType *), (pc, type));
1104:   PetscFunctionReturn(PETSC_SUCCESS);
1105: }

1107: /*@
1108:   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

1110:   Logically Collective

1112:   Input Parameters:
1113: + pc   - the preconditioner context
1114: - type - type of composition, one of
1115: .vb
1116:   PC_COMPOSITE_ADDITIVE       - local additive combination
1117:   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1118: .ve

1120:   Options Database Key:
1121: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

1123:   Level: intermediate

1125: .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMGetLocalType()`, `PCASMType`, `PCCompositeType`
1126: @*/
1127: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1128: {
1129:   PetscFunctionBegin;
1132:   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1133:   PetscFunctionReturn(PETSC_SUCCESS);
1134: }

1136: /*@
1137:   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method, `PCASM`.

1139:   Logically Collective

1141:   Input Parameter:
1142: . pc - the preconditioner context

1144:   Output Parameter:
1145: . type - type of composition, one of
1146: .vb
1147:   PC_COMPOSITE_ADDITIVE       - local additive combination
1148:   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1149: .ve

1151:   Options Database Key:
1152: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

1154:   Level: intermediate

1156: .seealso: [](ch_ksp), `PCASM`, `PCASMSetType()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMCreate()`, `PCASMType`, `PCCompositeType`
1157: @*/
1158: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1159: {
1160:   PetscFunctionBegin;
1162:   PetscAssertPointer(type, 2);
1163:   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1164:   PetscFunctionReturn(PETSC_SUCCESS);
1165: }

1167: /*@
1168:   PCASMSetSortIndices - Determines whether subdomain indices are sorted.

1170:   Logically Collective

1172:   Input Parameters:
1173: + pc     - the preconditioner context
1174: - doSort - sort the subdomain indices

1176:   Level: intermediate

1178: .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMGetSubKSP()`,
1179:           `PCASMCreateSubdomains2D()`
1180: @*/
1181: PetscErrorCode PCASMSetSortIndices(PC pc, PetscBool doSort)
1182: {
1183:   PetscFunctionBegin;
1186:   PetscTryMethod(pc, "PCASMSetSortIndices_C", (PC, PetscBool), (pc, doSort));
1187:   PetscFunctionReturn(PETSC_SUCCESS);
1188: }

1190: /*@C
1191:   PCASMGetSubKSP - Gets the local `KSP` contexts for all blocks on
1192:   this processor.

1194:   Collective iff first_local is requested

1196:   Input Parameter:
1197: . pc - the preconditioner context

1199:   Output Parameters:
1200: + n_local     - the number of blocks on this processor or NULL
1201: . first_local - the global number of the first block on this processor or NULL,
1202:                  all processors must request or all must pass NULL
1203: - ksp         - the array of `KSP` contexts

1205:   Level: advanced

1207:   Notes:
1208:   After `PCASMGetSubKSP()` the array of `KSP`s is not to be freed.

1210:   You must call `KSPSetUp()` before calling `PCASMGetSubKSP()`.

1212:   Fortran Notes:
1213:   The output argument 'ksp' must be an array of sufficient length or `PETSC_NULL_KSP`. The latter can be used to learn the necessary length.
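
  Example Usage:
  A minimal sketch (assuming `ksp` is the outer solver and `pc` was obtained from it with `KSPGetPC()`; `PetscCall()` error checking is omitted):
.vb
  KSP     *subksp;
  PC       subpc;
  PetscInt i, nlocal, first;

  KSPSetUp(ksp);
  PCASMGetSubKSP(pc, &nlocal, &first, &subksp);
  for (i = 0; i < nlocal; i++) {
    KSPGetPC(subksp[i], &subpc);
    PCSetType(subpc, PCLU); /* for example, a direct solve on each block */
  }
.ve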

1215: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1216:           `PCASMCreateSubdomains2D()`
1217: @*/
1218: PetscErrorCode PCASMGetSubKSP(PC pc, PetscInt *n_local, PetscInt *first_local, KSP *ksp[])
1219: {
1220:   PetscFunctionBegin;
1222:   PetscUseMethod(pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (pc, n_local, first_local, ksp));
1223:   PetscFunctionReturn(PETSC_SUCCESS);
1224: }

1226: /*MC
1227:    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1228:            its own `KSP` object, {cite}`dryja1987additive` and {cite}`1sbg`

1230:    Options Database Keys:
1231: +  -pc_asm_blocks <blks>                          - Sets total blocks. Defaults to one block per MPI process.
1232: .  -pc_asm_overlap <ovl>                          - Sets overlap
1233: .  -pc_asm_type [basic,restrict,interpolate,none] - Sets `PCASMType`, default is restrict. See `PCASMSetType()`
1234: -  -pc_asm_local_type [additive, multiplicative]  - Sets `PCCompositeType`, default is additive. See `PCASMSetLocalType()`

1236:    Level: beginner

1238:    Notes:
1239:    If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1240:    will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1241:    -pc_asm_type basic to get the same convergence behavior.

1243:    Each processor can have one or more blocks, but a block cannot be shared by more
1244:    than one processor. Use `PCGASM` for subdomains shared by multiple processes.

1246:    To set options on the solvers for each block, append -sub_ to all the `KSP` and `PC`
1247:    options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

1249:    To set the options on the solvers separately for each block, call `PCASMGetSubKSP()`
1250:    and set the options directly on the resulting `KSP` object (you can access its `PC` with `KSPGetPC()`)
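
   Example Usage:
   A representative command line combining these keys (the executable name ./ex2 is only illustrative):
.vb
   mpiexec -n 4 ./ex2 -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu
.ve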

1252: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCASMType`, `PCCompositeType`,
1253:           `PCBJACOBI`, `PCASMGetSubKSP()`, `PCASMSetLocalSubdomains()`, `PCASMGetType()`, `PCASMSetLocalType()`, `PCASMGetLocalType()`,
1254:           `PCASMSetTotalSubdomains()`, `PCSetModifySubMatrices()`, `PCASMSetOverlap()`, `PCASMSetType()`
1255: M*/

1257: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1258: {
1259:   PC_ASM *osm;

1261:   PetscFunctionBegin;
1262:   PetscCall(PetscNew(&osm));

1264:   osm->n             = PETSC_DECIDE;
1265:   osm->n_local       = 0;
1266:   osm->n_local_true  = PETSC_DECIDE;
1267:   osm->overlap       = 1;
1268:   osm->ksp           = NULL;
1269:   osm->restriction   = NULL;
1270:   osm->lprolongation = NULL;
1271:   osm->lrestriction  = NULL;
1272:   osm->x             = NULL;
1273:   osm->y             = NULL;
1274:   osm->is            = NULL;
1275:   osm->is_local      = NULL;
1276:   osm->mat           = NULL;
1277:   osm->pmat          = NULL;
1278:   osm->type          = PC_ASM_RESTRICT;
1279:   osm->loctype       = PC_COMPOSITE_ADDITIVE;
1280:   osm->sort_indices  = PETSC_TRUE;
1281:   osm->dm_subdomains = PETSC_FALSE;
1282:   osm->sub_mat_type  = NULL;

1284:   pc->data                 = (void *)osm;
1285:   pc->ops->apply           = PCApply_ASM;
1286:   pc->ops->matapply        = PCMatApply_ASM;
1287:   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1288:   pc->ops->setup           = PCSetUp_ASM;
1289:   pc->ops->reset           = PCReset_ASM;
1290:   pc->ops->destroy         = PCDestroy_ASM;
1291:   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1292:   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1293:   pc->ops->view            = PCView_ASM;
1294:   pc->ops->applyrichardson = NULL;

1296:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalSubdomains_C", PCASMSetLocalSubdomains_ASM));
1297:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetTotalSubdomains_C", PCASMSetTotalSubdomains_ASM));
1298:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetOverlap_C", PCASMSetOverlap_ASM));
1299:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetType_C", PCASMSetType_ASM));
1300:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetType_C", PCASMGetType_ASM));
1301:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetLocalType_C", PCASMSetLocalType_ASM));
1302:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetLocalType_C", PCASMGetLocalType_ASM));
1303:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSortIndices_C", PCASMSetSortIndices_ASM));
1304:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubKSP_C", PCASMGetSubKSP_ASM));
1305:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMGetSubMatType_C", PCASMGetSubMatType_ASM));
1306:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCASMSetSubMatType_C", PCASMSetSubMatType_ASM));
1307:   PetscFunctionReturn(PETSC_SUCCESS);
1308: }

1310: /*@C
1311:   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1312:   preconditioner, `PCASM`, for any problem on a general grid.

1314:   Collective

1316:   Input Parameters:
1317: + A - The global matrix operator
1318: - n - the number of local blocks

1320:   Output Parameter:
1321: . outis - the array of index sets defining the subdomains

1323:   Level: advanced

1325:   Note:
1326:   This generates nonoverlapping subdomains; `PCASM` will generate the overlap
1327:   from these if you use `PCASMSetLocalSubdomains()`.

1329:   Fortran Notes:
1330:   You must provide the outis[] array, already allocated with length n.
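
  Example Usage:
  A minimal sketch (assuming `A` is the matrix from which the preconditioner is built and `pc` is a `PCASM`; `PetscCall()` error checking is omitted):
.vb
  IS      *is;
  PetscInt nblocks = 4;

  PCASMCreateSubdomains(A, nblocks, &is);
  PCASMSetLocalSubdomains(pc, nblocks, is, NULL);
  PCASMDestroySubdomains(nblocks, is, NULL);
.ve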

1332: .seealso: [](ch_ksp), `PCASM`, `PCASMSetLocalSubdomains()`, `PCASMDestroySubdomains()`
1333: @*/
1334: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS *outis[])
1335: {
1336:   MatPartitioning mpart;
1337:   const char     *prefix;
1338:   PetscInt        i, j, rstart, rend, bs;
1339:   PetscBool       hasop, isbaij = PETSC_FALSE, foundpart = PETSC_FALSE;
1340:   Mat             Ad = NULL, adj;
1341:   IS              ispart, isnumb, *is;

1343:   PetscFunctionBegin;
1345:   PetscAssertPointer(outis, 3);
1346:   PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of local blocks must be > 0, n = %" PetscInt_FMT, n);

1348:   /* Get prefix, row distribution, and block size */
1349:   PetscCall(MatGetOptionsPrefix(A, &prefix));
1350:   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
1351:   PetscCall(MatGetBlockSize(A, &bs));
1352:   PetscCheck(rstart / bs * bs == rstart && rend / bs * bs == rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "bad row distribution [%" PetscInt_FMT ",%" PetscInt_FMT ") for matrix block size %" PetscInt_FMT, rstart, rend, bs);

1354:   /* Get diagonal block from matrix if possible */
1355:   PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL_BLOCK, &hasop));
1356:   if (hasop) PetscCall(MatGetDiagonalBlock(A, &Ad));
1357:   if (Ad) {
1358:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQBAIJ, &isbaij));
1359:     if (!isbaij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)Ad, MATSEQSBAIJ, &isbaij));
1360:   }
1361:   if (Ad && n > 1) {
1362:     PetscBool match, done;
1363:     /* Try to setup a good matrix partitioning if available */
1364:     PetscCall(MatPartitioningCreate(PETSC_COMM_SELF, &mpart));
1365:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)mpart, prefix));
1366:     PetscCall(MatPartitioningSetFromOptions(mpart));
1367:     PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGCURRENT, &match));
1368:     if (!match) PetscCall(PetscObjectTypeCompare((PetscObject)mpart, MATPARTITIONINGSQUARE, &match));
1369:     if (!match) { /* assume a "good" partitioner is available */
1370:       PetscInt        na;
1371:       const PetscInt *ia, *ja;
1372:       PetscCall(MatGetRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1373:       if (done) {
1374:         /* Build adjacency matrix by hand. Unfortunately a call to
1375:            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1376:            remove the block-aij structure and we cannot expect
1377:            MatPartitioning to split vertices as we need */
1378:         PetscInt        i, j, len, nnz, cnt, *iia = NULL, *jja = NULL;
1379:         const PetscInt *row;
1380:         nnz = 0;
1381:         for (i = 0; i < na; i++) { /* count number of nonzeros */
1382:           len = ia[i + 1] - ia[i];
1383:           row = ja + ia[i];
1384:           for (j = 0; j < len; j++) {
1385:             if (row[j] == i) { /* don't count diagonal */
1386:               len--;
1387:               break;
1388:             }
1389:           }
1390:           nnz += len;
1391:         }
1392:         PetscCall(PetscMalloc1(na + 1, &iia));
1393:         PetscCall(PetscMalloc1(nnz, &jja));
1394:         nnz    = 0;
1395:         iia[0] = 0;
1396:         for (i = 0; i < na; i++) { /* fill adjacency */
1397:           cnt = 0;
1398:           len = ia[i + 1] - ia[i];
1399:           row = ja + ia[i];
1400:           for (j = 0; j < len; j++) {
1401:             if (row[j] != i) { /* if not diagonal */
1402:               jja[nnz + cnt++] = row[j];
1403:             }
1404:           }
1405:           nnz += cnt;
1406:           iia[i + 1] = nnz;
1407:         }
1408:         /* Partitioning of the adjacency matrix */
1409:         PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, na, na, iia, jja, NULL, &adj));
1410:         PetscCall(MatPartitioningSetAdjacency(mpart, adj));
1411:         PetscCall(MatPartitioningSetNParts(mpart, n));
1412:         PetscCall(MatPartitioningApply(mpart, &ispart));
1413:         PetscCall(ISPartitioningToNumbering(ispart, &isnumb));
1414:         PetscCall(MatDestroy(&adj));
1415:         foundpart = PETSC_TRUE;
1416:       }
1417:       PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_TRUE, isbaij, &na, &ia, &ja, &done));
1418:     }
1419:     PetscCall(MatPartitioningDestroy(&mpart));
1420:   }

1422:   PetscCall(PetscMalloc1(n, &is));
1423:   *outis = is;

1425:   if (!foundpart) {
1426:     /* Partitioning by contiguous chunks of rows */

1428:     PetscInt mbs   = (rend - rstart) / bs;
1429:     PetscInt start = rstart;
1430:     for (i = 0; i < n; i++) {
1431:       PetscInt count = (mbs / n + ((mbs % n) > i)) * bs;
1432:       PetscCall(ISCreateStride(PETSC_COMM_SELF, count, start, 1, &is[i]));
1433:       start += count;
1434:     }

1436:   } else {
1437:     /* Partitioning by adjacency of diagonal block  */

1439:     const PetscInt *numbering;
1440:     PetscInt       *count, nidx, *indices, *newidx, start = 0;
1441:     /* Get node count in each partition */
1442:     PetscCall(PetscMalloc1(n, &count));
1443:     PetscCall(ISPartitioningCount(ispart, n, count));
1444:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1445:       for (i = 0; i < n; i++) count[i] *= bs;
1446:     }
1447:     /* Build indices from node numbering */
1448:     PetscCall(ISGetLocalSize(isnumb, &nidx));
1449:     PetscCall(PetscMalloc1(nidx, &indices));
1450:     for (i = 0; i < nidx; i++) indices[i] = i; /* needs to be initialized */
1451:     PetscCall(ISGetIndices(isnumb, &numbering));
1452:     PetscCall(PetscSortIntWithPermutation(nidx, numbering, indices));
1453:     PetscCall(ISRestoreIndices(isnumb, &numbering));
1454:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1455:       PetscCall(PetscMalloc1(nidx * bs, &newidx));
1456:       for (i = 0; i < nidx; i++) {
1457:         for (j = 0; j < bs; j++) newidx[i * bs + j] = indices[i] * bs + j;
1458:       }
1459:       PetscCall(PetscFree(indices));
1460:       nidx *= bs;
1461:       indices = newidx;
1462:     }
1463:     /* Shift to get global indices */
1464:     for (i = 0; i < nidx; i++) indices[i] += rstart;

1466:     /* Build the index sets for each block */
1467:     for (i = 0; i < n; i++) {
1468:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count[i], &indices[start], PETSC_COPY_VALUES, &is[i]));
1469:       PetscCall(ISSort(is[i]));
1470:       start += count[i];
1471:     }

1473:     PetscCall(PetscFree(count));
1474:     PetscCall(PetscFree(indices));
1475:     PetscCall(ISDestroy(&isnumb));
1476:     PetscCall(ISDestroy(&ispart));
1477:   }
1478:   PetscFunctionReturn(PETSC_SUCCESS);
1479: }

1481: /*@C
1482:   PCASMDestroySubdomains - Destroys the index sets created with
1483:   `PCASMCreateSubdomains()`. Should be called after setting subdomains with `PCASMSetLocalSubdomains()`.

1485:   Collective

1487:   Input Parameters:
1488: + n        - the number of index sets
1489: . is       - the array of index sets
1490: - is_local - the array of local index sets, can be `NULL`

1492:   Level: advanced

1494: .seealso: [](ch_ksp), `PCASM`, `PCASMCreateSubdomains()`, `PCASMSetLocalSubdomains()`
1495: @*/
1496: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1497: {
1498:   PetscInt i;

1500:   PetscFunctionBegin;
1501:   if (n <= 0) PetscFunctionReturn(PETSC_SUCCESS);
1502:   if (is) {
1503:     PetscAssertPointer(is, 2);
1504:     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is[i]));
1505:     PetscCall(PetscFree(is));
1506:   }
1507:   if (is_local) {
1508:     PetscAssertPointer(is_local, 3);
1509:     for (i = 0; i < n; i++) PetscCall(ISDestroy(&is_local[i]));
1510:     PetscCall(PetscFree(is_local));
1511:   }
1512:   PetscFunctionReturn(PETSC_SUCCESS);
1513: }

1515: /*@C
1516:   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1517:   preconditioner, `PCASM`, for a two-dimensional problem on a regular grid.

1519:   Not Collective

1521:   Input Parameters:
1522: + m       - the number of mesh points in the x direction
1523: . n       - the number of mesh points in the y direction
1524: . M       - the number of subdomains in the x direction
1525: . N       - the number of subdomains in the y direction
1526: . dof     - degrees of freedom per node
1527: - overlap - overlap in mesh lines

1529:   Output Parameters:
1530: + Nsub     - the number of subdomains created
1531: . is       - array of index sets defining overlapping (if overlap > 0) subdomains
1532: - is_local - array of index sets defining non-overlapping subdomains

1534:   Level: advanced

1536:   Note:
1537:   Presently `PCASMCreateSubdomains2D()` is valid only for sequential
1538:   preconditioners.  More general related routines are
1539:   `PCASMSetTotalSubdomains()` and `PCASMSetLocalSubdomains()`.

1541:   Fortran Notes:
1542:   The `IS` arguments must be declared as arrays long enough to hold `Nsub` entries.
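
  Example Usage:
  A minimal sketch for a sequential run on a 64 x 64 grid split into a 4 x 4 array of subdomains with overlap 1 and one degree of freedom per node (`PetscCall()` error checking is omitted):
.vb
  PetscInt Nsub;
  IS      *is, *is_local;

  PCASMCreateSubdomains2D(64, 64, 4, 4, 1, 1, &Nsub, &is, &is_local);
  PCASMSetLocalSubdomains(pc, Nsub, is, is_local);
  PCASMDestroySubdomains(Nsub, is, is_local);
.ve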

1544: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetLocalSubdomains()`, `PCASMGetSubKSP()`,
1545:           `PCASMSetOverlap()`
1546: @*/
1547: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt dof, PetscInt overlap, PetscInt *Nsub, IS **is, IS **is_local)
1548: {
1549:   PetscInt i, j, height, width, ystart, xstart, yleft, yright, xleft, xright, loc_outer;
1550:   PetscInt nidx, *idx, loc, ii, jj, count;

1552:   PetscFunctionBegin;
1553:   PetscCheck(dof == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "dof must be 1");

1555:   *Nsub = N * M;
1556:   PetscCall(PetscMalloc1(*Nsub, is));
1557:   PetscCall(PetscMalloc1(*Nsub, is_local));
1558:   ystart    = 0;
1559:   loc_outer = 0;
1560:   for (i = 0; i < N; i++) {
1561:     height = n / N + ((n % N) > i); /* height of subdomain */
1562:     PetscCheck(height >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many N subdomains for mesh dimension n");
1563:     yleft = ystart - overlap;
1564:     if (yleft < 0) yleft = 0;
1565:     yright = ystart + height + overlap;
1566:     if (yright > n) yright = n;
1567:     xstart = 0;
1568:     for (j = 0; j < M; j++) {
1569:       width = m / M + ((m % M) > j); /* width of subdomain */
1570:       PetscCheck(width >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many M subdomains for mesh dimension m");
1571:       xleft = xstart - overlap;
1572:       if (xleft < 0) xleft = 0;
1573:       xright = xstart + width + overlap;
1574:       if (xright > m) xright = m;
1575:       nidx = (xright - xleft) * (yright - yleft);
1576:       PetscCall(PetscMalloc1(nidx, &idx));
1577:       loc = 0;
1578:       for (ii = yleft; ii < yright; ii++) {
1579:         count = m * ii + xleft;
1580:         for (jj = xleft; jj < xright; jj++) idx[loc++] = count++;
1581:       }
1582:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nidx, idx, PETSC_COPY_VALUES, (*is) + loc_outer));
1583:       if (overlap == 0) {
1584:         PetscCall(PetscObjectReference((PetscObject)(*is)[loc_outer]));

1586:         (*is_local)[loc_outer] = (*is)[loc_outer];
1587:       } else {
1588:         for (loc = 0, ii = ystart; ii < ystart + height; ii++) {
1589:           for (jj = xstart; jj < xstart + width; jj++) idx[loc++] = m * ii + jj;
1590:         }
1591:         PetscCall(ISCreateGeneral(PETSC_COMM_SELF, loc, idx, PETSC_COPY_VALUES, *is_local + loc_outer));
1592:       }
1593:       PetscCall(PetscFree(idx));
1594:       xstart += width;
1595:       loc_outer++;
1596:     }
1597:     ystart += height;
1598:   }
1599:   for (i = 0; i < *Nsub; i++) PetscCall(ISSort((*is)[i]));
1600:   PetscFunctionReturn(PETSC_SUCCESS);
1601: }

1603: /*@C
1604:   PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1605:   only) for the additive Schwarz preconditioner, `PCASM`.

1607:   Not Collective

1609:   Input Parameter:
1610: . pc - the preconditioner context

1612:   Output Parameters:
1613: + n        - if requested, the number of subdomains for this processor (default value = 1)
1614: . is       - if requested, the index sets that define the subdomains for this processor
1615: - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be `NULL`)

1617:   Level: advanced

1619:   Note:
1620:   The `IS` numbering is in the parallel, global numbering of the vector.

1622: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1623:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubmatrices()`
1624: @*/
1625: PetscErrorCode PCASMGetLocalSubdomains(PC pc, PetscInt *n, IS *is[], IS *is_local[])
1626: {
1627:   PC_ASM   *osm = (PC_ASM *)pc->data;
1628:   PetscBool match;

1630:   PetscFunctionBegin;
1632:   if (n) PetscAssertPointer(n, 2);
1633:   if (is) PetscAssertPointer(is, 3);
1634:   if (is_local) PetscAssertPointer(is_local, 4);
1635:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1636:   PetscCheck(match, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PC is not a PCASM");
1637:   if (n) *n = osm->n_local_true;
1638:   if (is) *is = osm->is;
1639:   if (is_local) *is_local = osm->is_local;
1640:   PetscFunctionReturn(PETSC_SUCCESS);
1641: }

1643: /*@C
1644:   PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1645:   only) for the additive Schwarz preconditioner, `PCASM`.

1647:   Not Collective

1649:   Input Parameter:
1650: . pc - the preconditioner context

1652:   Output Parameters:
1653: + n   - if requested, the number of matrices for this processor (default value = 1)
1654: - mat - if requested, the matrices

1656:   Level: advanced

1658:   Notes:
1659:   Call after `PCSetUp()` (or `KSPSetUp()`) but before `PCApply()` and before `PCSetUpOnBlocks()`

1661:   Usually one would use `PCSetModifySubMatrices()` to change the submatrices in building the preconditioner.
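
  Example Usage:
  A minimal sketch (assuming `ksp` is the outer solver and `pc`, obtained with `KSPGetPC()`, is a `PCASM`; `PetscCall()` error checking is omitted):
.vb
  PetscInt nsub;
  Mat     *submat;

  KSPSetUp(ksp);
  PCASMGetLocalSubmatrices(pc, &nsub, &submat);
.ve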

1663: .seealso: [](ch_ksp), `PCASM`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`, `PCASMGetSubKSP()`,
1664:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`, `PCSetModifySubMatrices()`
1665: @*/
1666: PetscErrorCode PCASMGetLocalSubmatrices(PC pc, PetscInt *n, Mat *mat[])
1667: {
1668:   PC_ASM   *osm;
1669:   PetscBool match;

1671:   PetscFunctionBegin;
1673:   if (n) PetscAssertPointer(n, 2);
1674:   if (mat) PetscAssertPointer(mat, 3);
1675:   PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call after KSPSetUp() or PCSetUp().");
1676:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1677:   if (!match) {
1678:     if (n) *n = 0;
1679:     if (mat) *mat = NULL;
1680:   } else {
1681:     osm = (PC_ASM *)pc->data;
1682:     if (n) *n = osm->n_local_true;
1683:     if (mat) *mat = osm->pmat;
1684:   }
1685:   PetscFunctionReturn(PETSC_SUCCESS);
1686: }

1688: /*@
1689:   PCASMSetDMSubdomains - Indicates whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

1691:   Logically Collective

1693:   Input Parameters:
1694: + pc  - the preconditioner
1695: - flg - boolean indicating whether to use subdomains defined by the `DM`

1697:   Options Database Key:
1698: . -pc_asm_dm_subdomains <bool> - use subdomains defined by the `DM`

1700:   Level: intermediate

1702:   Note:
1703:   `PCASMSetTotalSubdomains()` and `PCASMSetOverlap()` take precedence over `PCASMSetDMSubdomains()`,
1704:   so setting either of the first two effectively turns the latter off.
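
  Example Usage:
  A minimal sketch (assuming `ksp` has a `DM` attached, for example with `KSPSetDM()`, so the `DM` can supply the decomposition; `PetscCall()` error checking is omitted):
.vb
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCASM);
  PCASMSetDMSubdomains(pc, PETSC_TRUE);
.ve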

1706: .seealso: [](ch_ksp), `PCASM`, `PCASMGetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1707:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1708: @*/
1709: PetscErrorCode PCASMSetDMSubdomains(PC pc, PetscBool flg)
1710: {
1711:   PC_ASM   *osm = (PC_ASM *)pc->data;
1712:   PetscBool match;

1714:   PetscFunctionBegin;
1717:   PetscCheck(!pc->setupcalled, ((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Not for a setup PC.");
1718:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1719:   if (match) osm->dm_subdomains = flg;
1720:   PetscFunctionReturn(PETSC_SUCCESS);
1721: }

1723: /*@
1724:   PCASMGetDMSubdomains - Returns flag indicating whether to use `DMCreateDomainDecomposition()` to define the subdomains, whenever possible.

1726:   Not Collective

1728:   Input Parameter:
1729: . pc - the preconditioner

1731:   Output Parameter:
1732: . flg - boolean indicating whether to use subdomains defined by the `DM`

1734:   Level: intermediate

1736: .seealso: [](ch_ksp), `PCASM`, `PCASMSetDMSubdomains()`, `PCASMSetTotalSubdomains()`, `PCASMSetOverlap()`,
1737:           `PCASMCreateSubdomains2D()`, `PCASMSetLocalSubdomains()`, `PCASMGetLocalSubdomains()`
1738: @*/
1739: PetscErrorCode PCASMGetDMSubdomains(PC pc, PetscBool *flg)
1740: {
1741:   PC_ASM   *osm = (PC_ASM *)pc->data;
1742:   PetscBool match;

1744:   PetscFunctionBegin;
1746:   PetscAssertPointer(flg, 2);
1747:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &match));
1748:   if (match) *flg = osm->dm_subdomains;
1749:   else *flg = PETSC_FALSE;
1750:   PetscFunctionReturn(PETSC_SUCCESS);
1751: }

1753: /*@C
1754:   PCASMGetSubMatType - Gets the matrix type used for `PCASM` subsolves, as a string.

1756:   Not Collective

1758:   Input Parameter:
1759: . pc - the `PC`

1761:   Output Parameter:
1762: . sub_mat_type - name of matrix type

1764:   Level: advanced

1766: .seealso: [](ch_ksp), `PCASM`, `PCASMSetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1767: @*/
1768: PetscErrorCode PCASMGetSubMatType(PC pc, MatType *sub_mat_type)
1769: {
1770:   PetscFunctionBegin;
1772:   PetscTryMethod(pc, "PCASMGetSubMatType_C", (PC, MatType *), (pc, sub_mat_type));
1773:   PetscFunctionReturn(PETSC_SUCCESS);
1774: }

1776: /*@C
1777:   PCASMSetSubMatType - Set the type of matrix used for `PCASM` subsolves

1779:   Collective

1781:   Input Parameters:
1782: + pc           - the `PC` object
1783: - sub_mat_type - the `MatType`

1785:   Options Database Key:
1786: . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl.
1787:    If you specify a base name like aijviennacl, the corresponding sequential type is assumed.

1789:   Note:
1790:   See `MatType` for available types

1792:   Level: advanced
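
  Example Usage:
  A minimal sketch (MATSEQAIJVIENNACL is only illustrative and requires a PETSc build configured with ViennaCL):
.vb
  PCSetType(pc, PCASM);
  PCASMSetSubMatType(pc, MATSEQAIJVIENNACL);
.ve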

1794: .seealso: [](ch_ksp), `PCASM`, `PCASMGetSubMatType()`, `PCSetType()`, `VecSetType()`, `MatType`, `Mat`
1795: @*/
1796: PetscErrorCode PCASMSetSubMatType(PC pc, MatType sub_mat_type)
1797: {
1798:   PetscFunctionBegin;
1800:   PetscTryMethod(pc, "PCASMSetSubMatType_C", (PC, MatType), (pc, sub_mat_type));
1801:   PetscFunctionReturn(PETSC_SUCCESS);
1802: }