Actual source code: asm.c
petsc-3.7.7 2017-09-25
2: /*
3: This file defines an additive Schwarz preconditioner for any Mat implementation.
5: Note that each processor may have any number of subdomains. But in order to
6: deal easily with the VecScatter(), we treat each processor as if it has the
7: same number of subdomains.
9: n - total number of true subdomains on all processors
10: n_local_true - actual number of subdomains on this processor
11: n_local = maximum over all processors of n_local_true
12: */
13: #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
14: #include <petscdm.h>
16: typedef struct {
17: PetscInt n, n_local, n_local_true;
18: PetscInt overlap; /* overlap requested by user */
19: KSP *ksp; /* linear solvers for each block */
20: VecScatter *restriction; /* mapping from global to subregion */
21: VecScatter *localization; /* mapping from overlapping to non-overlapping subregion */
22: VecScatter *prolongation; /* mapping from subregion to global */
23: Vec *x,*y,*y_local; /* work vectors */
24: IS *is; /* index set that defines each overlapping subdomain */
25: IS *is_local; /* index set that defines each non-overlapping subdomain, may be NULL */
26: Mat *mat,*pmat; /* mat is not currently used */
27: PCASMType type; /* use reduced interpolation, restriction or both */
28: PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */
29: PetscBool same_local_solves; /* flag indicating whether all local solvers are same */
30: PetscBool sort_indices; /* flag to sort subdomain indices */
31: PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */
32: PCCompositeType loctype; /* the type of composition for local solves */
33: /* For multiplicative solve */
34: Mat *lmats; /* submatrices for overlapping multiplicative (process) subdomain */
35: Vec lx, ly; /* work vectors */
36: IS lis; /* index set that defines each overlapping multiplicative (process) subdomain */
37: } PC_ASM;
41: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
42: {
43: PC_ASM *osm = (PC_ASM*)pc->data;
45: PetscMPIInt rank;
46: PetscInt i,bsz;
47: PetscBool iascii,isstring;
48: PetscViewer sviewer;
51: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
52: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
53: if (iascii) {
54: char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
55: if (osm->overlap >= 0) {PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);}
56: if (osm->n > 0) {PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);}
57: PetscViewerASCIIPrintf(viewer," Additive Schwarz: %s, %s\n",blocks,overlaps);
58: PetscViewerASCIIPrintf(viewer," Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
59: if (osm->loctype != PC_COMPOSITE_ADDITIVE) {PetscViewerASCIIPrintf(viewer," Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);}
60: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
61: if (osm->same_local_solves) {
62: if (osm->ksp) {
63: PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");
64: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
65: if (!rank) {
66: PetscViewerASCIIPushTab(viewer);
67: KSPView(osm->ksp[0],sviewer);
68: PetscViewerASCIIPopTab(viewer);
69: }
70: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
71: }
72: } else {
73: PetscViewerASCIIPushSynchronized(viewer);
74: PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
75: PetscViewerFlush(viewer);
76: PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");
77: PetscViewerASCIIPushTab(viewer);
78: PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
79: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
80: for (i=0; i<osm->n_local_true; i++) {
81: ISGetLocalSize(osm->is[i],&bsz);
82: PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
83: KSPView(osm->ksp[i],sviewer);
84: PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
85: }
86: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
87: PetscViewerASCIIPopTab(viewer);
88: PetscViewerFlush(viewer);
89: PetscViewerASCIIPopSynchronized(viewer);
90: }
91: } else if (isstring) {
92: PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
93: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
94: if (osm->ksp) {KSPView(osm->ksp[0],sviewer);}
95: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
96: }
97: return(0);
98: }
102: static PetscErrorCode PCASMPrintSubdomains(PC pc)
103: {
104: PC_ASM *osm = (PC_ASM*)pc->data;
105: const char *prefix;
106: char fname[PETSC_MAX_PATH_LEN+1];
107: PetscViewer viewer, sviewer;
108: char *s;
109: PetscInt i,j,nidx;
110: const PetscInt *idx;
111: PetscMPIInt rank, size;
115: MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
116: MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
117: PCGetOptionsPrefix(pc,&prefix);
118: PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);
119: if (fname[0] == 0) { PetscStrcpy(fname,"stdout"); };
120: PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
121: for (i=0; i<osm->n_local; i++) {
122: if (i < osm->n_local_true) {
123: ISGetLocalSize(osm->is[i],&nidx);
124: ISGetIndices(osm->is[i],&idx);
125: /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
126: PetscMalloc1(16*(nidx+1)+512, &s);
127: PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
128: PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);
129: for (j=0; j<nidx; j++) {
130: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
131: }
132: ISRestoreIndices(osm->is[i],&idx);
133: PetscViewerStringSPrintf(sviewer,"\n");
134: PetscViewerDestroy(&sviewer);
135: PetscViewerASCIIPushSynchronized(viewer);
136: PetscViewerASCIISynchronizedPrintf(viewer, s);
137: PetscViewerFlush(viewer);
138: PetscViewerASCIIPopSynchronized(viewer);
139: PetscFree(s);
140: if (osm->is_local) {
141: /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
142: PetscMalloc1(16*(nidx+1)+512, &s);
143: PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
144: PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);
145: ISGetLocalSize(osm->is_local[i],&nidx);
146: ISGetIndices(osm->is_local[i],&idx);
147: for (j=0; j<nidx; j++) {
148: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
149: }
150: ISRestoreIndices(osm->is_local[i],&idx);
151: PetscViewerStringSPrintf(sviewer,"\n");
152: PetscViewerDestroy(&sviewer);
153: PetscViewerASCIIPushSynchronized(viewer);
154: PetscViewerASCIISynchronizedPrintf(viewer, s);
155: PetscViewerFlush(viewer);
156: PetscViewerASCIIPopSynchronized(viewer);
157: PetscFree(s);
158: }
159: } else {
160: /* Participate in collective viewer calls. */
161: PetscViewerASCIIPushSynchronized(viewer);
162: PetscViewerFlush(viewer);
163: PetscViewerASCIIPopSynchronized(viewer);
164: /* Assume either all ranks have is_local or none do. */
165: if (osm->is_local) {
166: PetscViewerASCIIPushSynchronized(viewer);
167: PetscViewerFlush(viewer);
168: PetscViewerASCIIPopSynchronized(viewer);
169: }
170: }
171: }
172: PetscViewerFlush(viewer);
173: PetscViewerDestroy(&viewer);
174: return(0);
175: }
179: static PetscErrorCode PCSetUp_ASM(PC pc)
180: {
181: PC_ASM *osm = (PC_ASM*)pc->data;
183: PetscBool symset,flg;
184: PetscInt i,m,m_local;
185: MatReuse scall = MAT_REUSE_MATRIX;
186: IS isl;
187: KSP ksp;
188: PC subpc;
189: const char *prefix,*pprefix;
190: Vec vec;
191: DM *domain_dm = NULL;
194: if (!pc->setupcalled) {
196: if (!osm->type_set) {
197: MatIsSymmetricKnown(pc->pmat,&symset,&flg);
198: if (symset && flg) osm->type = PC_ASM_BASIC;
199: }
201: /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
202: if (osm->n_local_true == PETSC_DECIDE) {
203: /* no subdomains given */
204: /* try pc->dm first, if allowed */
205: if (osm->dm_subdomains && pc->dm) {
206: PetscInt num_domains, d;
207: char **domain_names;
208: IS *inner_domain_is, *outer_domain_is;
209: DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);
210: if (num_domains) {
211: PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
212: }
213: for (d = 0; d < num_domains; ++d) {
214: if (domain_names) {PetscFree(domain_names[d]);}
215: if (inner_domain_is) {ISDestroy(&inner_domain_is[d]);}
216: if (outer_domain_is) {ISDestroy(&outer_domain_is[d]);}
217: }
218: PetscFree(domain_names);
219: PetscFree(inner_domain_is);
220: PetscFree(outer_domain_is);
221: }
222: if (osm->n_local_true == PETSC_DECIDE) {
223: /* still no subdomains; use one subdomain per processor */
224: osm->n_local_true = 1;
225: }
226: }
227: { /* determine the global and max number of subdomains */
228: struct {PetscInt max,sum;} inwork,outwork;
229: inwork.max = osm->n_local_true;
230: inwork.sum = osm->n_local_true;
231: MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));
232: osm->n_local = outwork.max;
233: osm->n = outwork.sum;
234: }
235: if (!osm->is) { /* create the index sets */
236: PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
237: }
238: if (osm->n_local_true > 1 && !osm->is_local) {
239: PetscMalloc1(osm->n_local_true,&osm->is_local);
240: for (i=0; i<osm->n_local_true; i++) {
241: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
242: ISDuplicate(osm->is[i],&osm->is_local[i]);
243: ISCopy(osm->is[i],osm->is_local[i]);
244: } else {
245: PetscObjectReference((PetscObject)osm->is[i]);
246: osm->is_local[i] = osm->is[i];
247: }
248: }
249: }
250: PCGetOptionsPrefix(pc,&prefix);
251: flg = PETSC_FALSE;
252: PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);
253: if (flg) { PCASMPrintSubdomains(pc); }
255: if (osm->overlap > 0) {
256: /* Extend the "overlapping" regions by a number of steps */
257: MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
258: }
259: if (osm->sort_indices) {
260: for (i=0; i<osm->n_local_true; i++) {
261: ISSort(osm->is[i]);
262: if (osm->is_local) {
263: ISSort(osm->is_local[i]);
264: }
265: }
266: }
267: /* Create the local work vectors and scatter contexts */
268: MatCreateVecs(pc->pmat,&vec,0);
269: PetscMalloc1(osm->n_local,&osm->restriction);
270: if (osm->is_local) {PetscMalloc1(osm->n_local,&osm->localization);}
271: PetscMalloc1(osm->n_local,&osm->prolongation);
272: PetscMalloc1(osm->n_local,&osm->x);
273: PetscMalloc1(osm->n_local,&osm->y);
274: PetscMalloc1(osm->n_local,&osm->y_local);
275: for (i=0; i<osm->n_local_true; ++i) {
276: ISGetLocalSize(osm->is[i],&m);
277: VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);
278: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
279: VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);
280: ISDestroy(&isl);
281: VecDuplicate(osm->x[i],&osm->y[i]);
282: if (osm->is_local) {
283: ISLocalToGlobalMapping ltog;
284: IS isll;
285: const PetscInt *idx_local;
286: PetscInt *idx,nout;
288: ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
289: ISGetLocalSize(osm->is_local[i],&m_local);
290: ISGetIndices(osm->is_local[i], &idx_local);
291: PetscMalloc1(m_local,&idx);
292: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);
293: ISLocalToGlobalMappingDestroy(&ltog);
294: if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
295: ISRestoreIndices(osm->is_local[i], &idx_local);
296: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);
297: ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);
298: VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);
299: VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);
300: ISDestroy(&isll);
302: VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);
303: ISDestroy(&isl);
304: } else {
305: VecGetLocalSize(vec,&m_local);
307: osm->y_local[i] = osm->y[i];
309: PetscObjectReference((PetscObject) osm->y[i]);
311: osm->prolongation[i] = osm->restriction[i];
313: PetscObjectReference((PetscObject) osm->restriction[i]);
314: }
315: }
316: for (i=osm->n_local_true; i<osm->n_local; i++) {
317: VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);
318: VecDuplicate(osm->x[i],&osm->y[i]);
319: VecDuplicate(osm->x[i],&osm->y_local[i]);
320: ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);
321: VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);
322: if (osm->is_local) {
323: VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);
324: VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);
325: } else {
326: osm->prolongation[i] = osm->restriction[i];
327: PetscObjectReference((PetscObject) osm->restriction[i]);
328: }
329: ISDestroy(&isl);
330: }
331: VecDestroy(&vec);
333: if (!osm->ksp) {
334: /* Create the local solvers */
335: PetscMalloc1(osm->n_local_true,&osm->ksp);
336: if (domain_dm) {
337: PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
338: }
339: for (i=0; i<osm->n_local_true; i++) {
340: KSPCreate(PETSC_COMM_SELF,&ksp);
341: KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
342: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
343: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
344: KSPSetType(ksp,KSPPREONLY);
345: KSPGetPC(ksp,&subpc);
346: PCGetOptionsPrefix(pc,&prefix);
347: KSPSetOptionsPrefix(ksp,prefix);
348: KSPAppendOptionsPrefix(ksp,"sub_");
349: if (domain_dm) {
350: KSPSetDM(ksp, domain_dm[i]);
351: KSPSetDMActive(ksp, PETSC_FALSE);
352: DMDestroy(&domain_dm[i]);
353: }
354: osm->ksp[i] = ksp;
355: }
356: if (domain_dm) {
357: PetscFree(domain_dm);
358: }
359: }
360: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
361: PetscInt m;
363: ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);
364: ISSortRemoveDups(osm->lis);
365: ISGetLocalSize(osm->lis, &m);
366: VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);
367: VecDuplicate(osm->lx, &osm->ly);
368: }
369: scall = MAT_INITIAL_MATRIX;
370: } else {
371: /*
372: Destroy the blocks from the previous iteration
373: */
374: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
375: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
376: scall = MAT_INITIAL_MATRIX;
377: }
378: }
380: /*
381: Extract out the submatrices
382: */
383: MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
384: if (scall == MAT_INITIAL_MATRIX) {
385: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
386: for (i=0; i<osm->n_local_true; i++) {
387: PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
388: PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
389: }
390: }
391: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
392: IS *cis;
393: PetscInt c;
395: PetscMalloc1(osm->n_local_true, &cis);
396: for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
397: MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);
398: PetscFree(cis);
399: }
401: /* Return control to the user so that the submatrices can be modified (e.g., to apply
402: different boundary conditions for the submatrices than for the global problem) */
403: PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);
405: /*
406: Loop over subdomains putting them into local ksp
407: */
408: for (i=0; i<osm->n_local_true; i++) {
409: KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
410: if (!pc->setupcalled) {
411: KSPSetFromOptions(osm->ksp[i]);
412: }
413: }
414: return(0);
415: }
417: #include <petsc/private/kspimpl.h>
420: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
421: {
422: PC_ASM *osm = (PC_ASM*)pc->data;
424: PetscInt i;
427: for (i=0; i<osm->n_local_true; i++) {
428: KSPSetUp(osm->ksp[i]);
429: if (osm->ksp[i]->reason == KSP_DIVERGED_PCSETUP_FAILED) {
430: pc->failedreason = PC_SUBPC_ERROR;
431: }
432: }
433: return(0);
434: }
438: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
439: {
440: PC_ASM *osm = (PC_ASM*)pc->data;
442: PetscInt i,n_local = osm->n_local,n_local_true = osm->n_local_true;
443: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
446: /*
447: Support for limiting the restriction or interpolation to only local
448: subdomain values (leaving the other values 0).
449: */
450: if (!(osm->type & PC_ASM_RESTRICT)) {
451: forward = SCATTER_FORWARD_LOCAL;
452: /* have to zero the work RHS since scatter may leave some slots empty */
453: for (i=0; i<n_local_true; i++) {
454: VecZeroEntries(osm->x[i]);
455: }
456: }
457: if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
459: switch (osm->loctype)
460: {
461: case PC_COMPOSITE_ADDITIVE:
462: for (i=0; i<n_local; i++) {
463: VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
464: }
465: VecZeroEntries(y);
466: /* do the local solves */
467: for (i=0; i<n_local_true; i++) {
468: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
469: KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);
470: if (osm->localization) {
471: VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
472: VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
473: }
474: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
475: }
476: /* handle the rest of the scatters that do not have local solves */
477: for (i=n_local_true; i<n_local; i++) {
478: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
479: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
480: }
481: for (i=0; i<n_local; i++) {
482: VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
483: }
484: break;
485: case PC_COMPOSITE_MULTIPLICATIVE:
486: VecZeroEntries(y);
487: /* do the local solves */
488: for (i = 0; i < n_local_true; ++i) {
489: if (i > 0) {
490: /* Update rhs */
491: VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);
492: VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);
493: } else {
494: VecZeroEntries(osm->x[i]);
495: }
496: VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);
497: VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);
498: KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);
499: if (osm->localization) {
500: VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);
501: VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);
502: }
503: VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
504: VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
505: if (i < n_local_true-1) {
506: VecSet(osm->ly, 0.0);
507: VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);
508: VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);
509: VecScale(osm->ly, -1.0);
510: MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);
511: VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);
512: VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);
513: }
514: }
515: /* handle the rest of the scatters that do not have local solves */
516: for (i = n_local_true; i < n_local; ++i) {
517: VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);
518: VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);
519: VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
520: VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
521: }
522: break;
523: default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
524: }
525: return(0);
526: }
530: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
531: {
532: PC_ASM *osm = (PC_ASM*)pc->data;
534: PetscInt i,n_local = osm->n_local,n_local_true = osm->n_local_true;
535: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
538: /*
539: Support for limiting the restriction or interpolation to only local
540: subdomain values (leaving the other values 0).
542: Note: these are reversed from the PCApply_ASM() because we are applying the
543: transpose of the three terms
544: */
545: if (!(osm->type & PC_ASM_INTERPOLATE)) {
546: forward = SCATTER_FORWARD_LOCAL;
547: /* have to zero the work RHS since scatter may leave some slots empty */
548: for (i=0; i<n_local_true; i++) {
549: VecZeroEntries(osm->x[i]);
550: }
551: }
552: if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
554: for (i=0; i<n_local; i++) {
555: VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
556: }
557: VecZeroEntries(y);
558: /* do the local solves */
559: for (i=0; i<n_local_true; i++) {
560: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
561: KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);
562: if (osm->localization) {
563: VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
564: VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
565: }
566: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
567: }
568: /* handle the rest of the scatters that do not have local solves */
569: for (i=n_local_true; i<n_local; i++) {
570: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
571: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
572: }
573: for (i=0; i<n_local; i++) {
574: VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
575: }
576: return(0);
577: }
581: static PetscErrorCode PCReset_ASM(PC pc)
582: {
583: PC_ASM *osm = (PC_ASM*)pc->data;
585: PetscInt i;
588: if (osm->ksp) {
589: for (i=0; i<osm->n_local_true; i++) {
590: KSPReset(osm->ksp[i]);
591: }
592: }
593: if (osm->pmat) {
594: if (osm->n_local_true > 0) {
595: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
596: }
597: }
598: if (osm->restriction) {
599: for (i=0; i<osm->n_local; i++) {
600: VecScatterDestroy(&osm->restriction[i]);
601: if (osm->localization) {VecScatterDestroy(&osm->localization[i]);}
602: VecScatterDestroy(&osm->prolongation[i]);
603: VecDestroy(&osm->x[i]);
604: VecDestroy(&osm->y[i]);
605: VecDestroy(&osm->y_local[i]);
606: }
607: PetscFree(osm->restriction);
608: if (osm->localization) {PetscFree(osm->localization);}
609: PetscFree(osm->prolongation);
610: PetscFree(osm->x);
611: PetscFree(osm->y);
612: PetscFree(osm->y_local);
613: }
614: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
615: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
616: ISDestroy(&osm->lis);
617: MatDestroyMatrices(osm->n_local_true, &osm->lmats);
618: VecDestroy(&osm->lx);
619: VecDestroy(&osm->ly);
620: }
622: osm->is = 0;
623: osm->is_local = 0;
624: return(0);
625: }
629: static PetscErrorCode PCDestroy_ASM(PC pc)
630: {
631: PC_ASM *osm = (PC_ASM*)pc->data;
633: PetscInt i;
636: PCReset_ASM(pc);
637: if (osm->ksp) {
638: for (i=0; i<osm->n_local_true; i++) {
639: KSPDestroy(&osm->ksp[i]);
640: }
641: PetscFree(osm->ksp);
642: }
643: PetscFree(pc->data);
644: return(0);
645: }
649: static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
650: {
651: PC_ASM *osm = (PC_ASM*)pc->data;
653: PetscInt blocks,ovl;
654: PetscBool symset,flg;
655: PCASMType asmtype;
656: PCCompositeType loctype;
659: /* set the type to symmetric if matrix is symmetric */
660: if (!osm->type_set && pc->pmat) {
661: MatIsSymmetricKnown(pc->pmat,&symset,&flg);
662: if (symset && flg) osm->type = PC_ASM_BASIC;
663: }
664: PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");
665: PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
666: PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
667: if (flg) {
668: PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
669: osm->dm_subdomains = PETSC_FALSE;
670: }
671: PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
672: if (flg) {
673: PCASMSetOverlap(pc,ovl);
674: osm->dm_subdomains = PETSC_FALSE;
675: }
676: flg = PETSC_FALSE;
677: PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
678: if (flg) {PCASMSetType(pc,asmtype); }
679: flg = PETSC_FALSE;
680: PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);
681: if (flg) {PCASMSetLocalType(pc,loctype); }
682: PetscOptionsTail();
683: return(0);
684: }
686: /*------------------------------------------------------------------------------------*/
690: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
691: {
692: PC_ASM *osm = (PC_ASM*)pc->data;
694: PetscInt i;
697: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
698: if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
700: if (!pc->setupcalled) {
701: if (is) {
702: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is[i]);}
703: }
704: if (is_local) {
705: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is_local[i]);}
706: }
707: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
709: osm->n_local_true = n;
710: osm->is = 0;
711: osm->is_local = 0;
712: if (is) {
713: PetscMalloc1(n,&osm->is);
714: for (i=0; i<n; i++) osm->is[i] = is[i];
715: /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
716: osm->overlap = -1;
717: }
718: if (is_local) {
719: PetscMalloc1(n,&osm->is_local);
720: for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
721: if (!is) {
722: PetscMalloc1(osm->n_local_true,&osm->is);
723: for (i=0; i<osm->n_local_true; i++) {
724: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
725: ISDuplicate(osm->is_local[i],&osm->is[i]);
726: ISCopy(osm->is_local[i],osm->is[i]);
727: } else {
728: PetscObjectReference((PetscObject)osm->is_local[i]);
729: osm->is[i] = osm->is_local[i];
730: }
731: }
732: }
733: }
734: }
735: return(0);
736: }
740: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
741: {
742: PC_ASM *osm = (PC_ASM*)pc->data;
744: PetscMPIInt rank,size;
745: PetscInt n;
748: if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
749: if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");
751: /*
752: Split the subdomains equally among all processors
753: */
754: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
755: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
756: n = N/size + ((N % size) > rank);
757: if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
758: if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
759: if (!pc->setupcalled) {
760: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
762: osm->n_local_true = n;
763: osm->is = 0;
764: osm->is_local = 0;
765: }
766: return(0);
767: }
771: static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
772: {
773: PC_ASM *osm = (PC_ASM*)pc->data;
776: if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
777: if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
778: if (!pc->setupcalled) osm->overlap = ovl;
779: return(0);
780: }
784: static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
785: {
786: PC_ASM *osm = (PC_ASM*)pc->data;
789: osm->type = type;
790: osm->type_set = PETSC_TRUE;
791: return(0);
792: }
796: static PetscErrorCode PCASMGetType_ASM(PC pc,PCASMType *type)
797: {
798: PC_ASM *osm = (PC_ASM*)pc->data;
801: *type = osm->type;
802: return(0);
803: }
807: static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
808: {
809: PC_ASM *osm = (PC_ASM *) pc->data;
812: osm->loctype = type;
813: return(0);
814: }
818: static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
819: {
820: PC_ASM *osm = (PC_ASM *) pc->data;
823: *type = osm->loctype;
824: return(0);
825: }
829: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
830: {
831: PC_ASM *osm = (PC_ASM*)pc->data;
834: osm->sort_indices = doSort;
835: return(0);
836: }
840: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
841: {
842: PC_ASM *osm = (PC_ASM*)pc->data;
846: if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
848: if (n_local) *n_local = osm->n_local_true;
849: if (first_local) {
850: MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
851: *first_local -= osm->n_local_true;
852: }
853: if (ksp) {
854: /* Assume that local solves are now different; not necessarily
855: true though! This flag is used only for PCView_ASM() */
856: *ksp = osm->ksp;
857: osm->same_local_solves = PETSC_FALSE;
858: }
859: return(0);
860: }
864: /*@C
865: PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
867: Collective on PC
869: Input Parameters:
870: + pc - the preconditioner context
871: . n - the number of subdomains for this processor (default value = 1)
872: . is - the index sets that define the subdomains for this processor
873: (or NULL for PETSc to determine subdomains)
874: - is_local - the index sets that define the local part of the subdomains for this processor
875: (or NULL to use the default of 1 subdomain per process)
877: Notes:
878: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
880: By default the ASM preconditioner uses 1 block per processor.
882: Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
884: Level: advanced
886: .keywords: PC, ASM, set, local, subdomains, additive Schwarz
888: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
889: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
890: @*/
891: PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
892: {
897: PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
898: return(0);
899: }
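
A minimal usage sketch (illustrative only, not part of asm.c): defining two subdomains on each process from user-chosen index sets, assuming a KSP ksp has already been created and given its operators; the index values are placeholders and error checking is omitted.

    PC       pc;
    IS       is[2];
    PetscInt idx0[] = {0,1,2,3},idx1[] = {4,5,6,7};  /* hypothetical global rows owned by this process */

    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    ISCreateGeneral(PETSC_COMM_SELF,4,idx0,PETSC_COPY_VALUES,&is[0]);
    ISCreateGeneral(PETSC_COMM_SELF,4,idx1,PETSC_COPY_VALUES,&is[1]);
    PCASMSetLocalSubdomains(pc,2,is,NULL);   /* PCASM references the IS objects, see PCASMSetLocalSubdomains_ASM() above */
    ISDestroy(&is[0]);
    ISDestroy(&is[1]);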
903: /*@C
904: PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
905: additive Schwarz preconditioner. Either all or no processors in the
906: PC communicator must call this routine, with the same index sets.
908: Collective on PC
910: Input Parameters:
911: + pc - the preconditioner context
912: . N - the number of subdomains for all processors
913: . is - the index sets that define the subdomains for all processors
914: (or NULL to ask PETSc to come up with subdomains)
915: - is_local - the index sets that define the local part of the subdomains for this processor
916: (or NULL to use the default of 1 subdomain per process)
918: Options Database Key:
919: To set the total number of subdomain blocks rather than specify the
920: index sets, use the option
921: . -pc_asm_blocks <blks> - Sets total blocks
923: Notes:
924: Currently you cannot use this to set the actual subdomains with the argument is.
926: By default the ASM preconditioner uses 1 block per processor.
928: These index sets cannot be destroyed until after completion of the
929: linear solves for which the ASM preconditioner is being used.
931: Use PCASMSetLocalSubdomains() to set local subdomains.
933: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
935: Level: advanced
937: .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
939: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
940: PCASMCreateSubdomains2D()
941: @*/
942: PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
943: {
948: PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
949: return(0);
950: }
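
A usage sketch (illustrative only, not part of asm.c): requesting eight blocks split across all processes, assuming a KSP ksp already exists; this is equivalent to the option -pc_asm_blocks 8.

    PC pc;

    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    PCASMSetTotalSubdomains(pc,8,NULL,NULL);   /* index sets cannot be passed here, see the Notes above */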
954: /*@
955: PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
956: additive Schwarz preconditioner. Either all or no processors in the
957: PC communicator must call this routine. If MatIncreaseOverlap is used,
958: use the option -mat_increase_overlap when the problem size is large.
960: Logically Collective on PC
962: Input Parameters:
963: + pc - the preconditioner context
964: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
966: Options Database Key:
967: . -pc_asm_overlap <ovl> - Sets overlap
969: Notes:
970: By default the ASM preconditioner uses 1 block per processor. To use
971: multiple blocks per processor, see PCASMSetTotalSubdomains() and
972: PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
974: The overlap defaults to 1, so if one desires that no additional
975: overlap be computed beyond what may have been set with a call to
976: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
977: must be set to 0. In particular, if one does not explicitly set
978: the subdomains in an application code, then all overlap would be computed
979: internally by PETSc, and using an overlap of 0 would result in an ASM
980: variant that is equivalent to the block Jacobi preconditioner.
982: Note that one can define initial index sets with any overlap via
983: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
984: PCASMSetOverlap() merely allows PETSc to extend that overlap further
985: if desired.
987: Level: intermediate
989: .keywords: PC, ASM, set, overlap
991: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
992: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
993: @*/
994: PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
995: {
1001: PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
1002: return(0);
1003: }
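
A usage sketch (illustrative only, not part of asm.c): growing each subdomain by two levels of matrix connectivity, assuming a KSP ksp already exists; equivalent to -pc_asm_overlap 2. As noted above, an overlap of 0 with default subdomains reduces ASM to block Jacobi.

    PC pc;

    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    PCASMSetOverlap(pc,2);   /* MatIncreaseOverlap() is applied with this value during PCSetUp() */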
1007: /*@
1008: PCASMSetType - Sets the type of restriction and interpolation used
1009: for local problems in the additive Schwarz method.
1011: Logically Collective on PC
1013: Input Parameters:
1014: + pc - the preconditioner context
1015: - type - variant of ASM, one of
1016: .vb
1017: PC_ASM_BASIC - full interpolation and restriction
1018: PC_ASM_RESTRICT - full restriction, local processor interpolation
1019: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1020: PC_ASM_NONE - local processor restriction and interpolation
1021: .ve
1023: Options Database Key:
1024: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1026: Level: intermediate
1028: .keywords: PC, ASM, set, type
1030: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1031: PCASMCreateSubdomains2D()
1032: @*/
1033: PetscErrorCode PCASMSetType(PC pc,PCASMType type)
1034: {
1040: PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
1041: return(0);
1042: }
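
A usage sketch (illustrative only, not part of asm.c): selecting the standard (non-restricted) ASM variant, assuming a PC pc of type PCASM; equivalent to -pc_asm_type basic.

    PCASMSetType(pc,PC_ASM_BASIC);   /* full restriction and full interpolation */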
1046: /*@
1047: PCASMGetType - Gets the type of restriction and interpolation used
1048: for local problems in the additive Schwarz method.
1050: Logically Collective on PC
1052: Input Parameter:
1053: . pc - the preconditioner context
1055: Output Parameter:
1056: . type - variant of ASM, one of
1058: .vb
1059: PC_ASM_BASIC - full interpolation and restriction
1060: PC_ASM_RESTRICT - full restriction, local processor interpolation
1061: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1062: PC_ASM_NONE - local processor restriction and interpolation
1063: .ve
1065: Options Database Key:
1066: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1068: Level: intermediate
1070: .keywords: PC, ASM, set, type
1072: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1073: PCASMCreateSubdomains2D()
1074: @*/
1075: PetscErrorCode PCASMGetType(PC pc,PCASMType *type)
1076: {
1081: PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
1082: return(0);
1083: }
1087: /*@
1088: PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1090: Logically Collective on PC
1092: Input Parameters:
1093: + pc - the preconditioner context
1094: - type - type of composition, one of
1095: .vb
1096: PC_COMPOSITE_ADDITIVE - local additive combination
1097: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1098: .ve
1100: Options Database Key:
1101: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1103: Level: intermediate
1105: .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
1106: @*/
1107: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1108: {
1114: PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1115: return(0);
1116: }
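
A usage sketch (illustrative only, not part of asm.c): composing the local block solves multiplicatively instead of additively, assuming a PC pc of type PCASM; equivalent to -pc_asm_local_type multiplicative.

    PCASMSetLocalType(pc,PC_COMPOSITE_MULTIPLICATIVE);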
1120: /*@
1121: PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1123: Logically Collective on PC
1125: Input Parameter:
1126: . pc - the preconditioner context
1128: Output Parameter:
1129: . type - type of composition, one of
1130: .vb
1131: PC_COMPOSITE_ADDITIVE - local additive combination
1132: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1133: .ve
1135: Options Database Key:
1136: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1138: Level: intermediate
1140: .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
1141: @*/
1142: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1143: {
1149: PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1150: return(0);
1151: }
1155: /*@
1156: PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1158: Logically Collective on PC
1160: Input Parameters:
1161: + pc - the preconditioner context
1162: - doSort - sort the subdomain indices
1164: Level: intermediate
1166: .keywords: PC, ASM, set, type
1168: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1169: PCASMCreateSubdomains2D()
1170: @*/
1171: PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort)
1172: {
1178: PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1179: return(0);
1180: }
1184: /*@C
1185: PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1186: this processor.
1188: Collective on PC iff first_local is requested
1190: Input Parameter:
1191: . pc - the preconditioner context
1193: Output Parameters:
1194: + n_local - the number of blocks on this processor or NULL
1195: . first_local - the global number of the first block on this processor or NULL,
1196: all processors must request or all must pass NULL
1197: - ksp - the array of KSP contexts
1199: Note:
1200: After PCASMGetSubKSP() the array of KSP contexts is not to be freed.
1202: Currently for some matrix implementations only 1 block per processor
1203: is supported.
1205: You must call KSPSetUp() before calling PCASMGetSubKSP().
1207: Fortran note:
1208: The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_OBJECT. The latter can be used to learn the necessary length.
1210: Level: advanced
1212: .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1214: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1215: PCASMCreateSubdomains2D(),
1216: @*/
1217: PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1218: {
1223: PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1224: return(0);
1225: }
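
A usage sketch (illustrative only, not part of asm.c): configuring every local block solver after setup, assuming a KSP ksp with a PCASM preconditioner whose operators are already set; error checking omitted.

    KSP      *subksp;
    PC       pc,subpc;
    PetscInt i,nlocal,first;

    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    KSPSetUp(ksp);                              /* required before PCASMGetSubKSP() */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);  /* do not free subksp */
    for (i=0; i<nlocal; i++) {
      KSPSetType(subksp[i],KSPPREONLY);
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);                   /* e.g., incomplete factorization on every block */
    }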
1227: /* -------------------------------------------------------------------------------------*/
1228: /*MC
1229: PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1230: its own KSP object.
1232: Options Database Keys:
1233: + -pc_asm_blocks <blks> - Sets total blocks
1234: . -pc_asm_overlap <ovl> - Sets overlap
1235: - -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1237: IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1238: will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1239: -pc_asm_type basic to use the standard ASM.
1241: Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1242: than one processor. Defaults to one block per processor.
1244: To set options on the solvers for each block, append -sub_ to all the KSP and PC
1245: options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1247: To set the options on the solvers separately for each block call PCASMGetSubKSP()
1248: and set the options directly on the resulting KSP object (you can access its PC
1249: with KSPGetPC())
1252: Level: beginner
1254: Concepts: additive Schwarz method
1256: References:
1257: + 1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1258: Courant Institute, New York University Technical report
1259: - 2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decomposition: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1260: Cambridge University Press.
1262: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
1263: PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1264: PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1266: M*/
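
A minimal end-to-end sketch (illustrative only, not part of asm.c): solving with ASM and controlling the block solvers from the options database, assuming a Mat A and Vecs b,x already exist; error checking omitted.

    KSP ksp;
    PC  pc;

    KSPCreate(PETSC_COMM_WORLD,&ksp);
    KSPSetOperators(ksp,A,A);
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    KSPSetFromOptions(ksp);   /* e.g. -pc_asm_overlap 2 -sub_pc_type ilu -sub_ksp_type preonly */
    KSPSolve(ksp,b,x);
    KSPDestroy(&ksp);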
1270: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1271: {
1273: PC_ASM *osm;
1276: PetscNewLog(pc,&osm);
1278: osm->n = PETSC_DECIDE;
1279: osm->n_local = 0;
1280: osm->n_local_true = PETSC_DECIDE;
1281: osm->overlap = 1;
1282: osm->ksp = 0;
1283: osm->restriction = 0;
1284: osm->localization = 0;
1285: osm->prolongation = 0;
1286: osm->x = 0;
1287: osm->y = 0;
1288: osm->y_local = 0;
1289: osm->is = 0;
1290: osm->is_local = 0;
1291: osm->mat = 0;
1292: osm->pmat = 0;
1293: osm->type = PC_ASM_RESTRICT;
1294: osm->loctype = PC_COMPOSITE_ADDITIVE;
1295: osm->same_local_solves = PETSC_TRUE;
1296: osm->sort_indices = PETSC_TRUE;
1297: osm->dm_subdomains = PETSC_FALSE;
1299: pc->data = (void*)osm;
1300: pc->ops->apply = PCApply_ASM;
1301: pc->ops->applytranspose = PCApplyTranspose_ASM;
1302: pc->ops->setup = PCSetUp_ASM;
1303: pc->ops->reset = PCReset_ASM;
1304: pc->ops->destroy = PCDestroy_ASM;
1305: pc->ops->setfromoptions = PCSetFromOptions_ASM;
1306: pc->ops->setuponblocks = PCSetUpOnBlocks_ASM;
1307: pc->ops->view = PCView_ASM;
1308: pc->ops->applyrichardson = 0;
1310: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1311: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1312: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1313: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1314: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);
1315: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);
1316: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);
1317: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1318: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1319: return(0);
1320: }
1324: /*@C
1325: PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1326: preconditioner for any problem on a general grid.
1328: Collective
1330: Input Parameters:
1331: + A - The global matrix operator
1332: - n - the number of local blocks
1334: Output Parameters:
1335: . outis - the array of index sets defining the subdomains
1337: Level: advanced
1339: Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1340: from these if you use PCASMSetLocalSubdomains()
1342: In the Fortran version you must provide the array outis[] already allocated with length n.
1344: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1346: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1347: @*/
1348: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1349: {
1350: MatPartitioning mpart;
1351: const char *prefix;
1352: PetscErrorCode (*f)(Mat,Mat*);
1353: PetscMPIInt size;
1354: PetscInt i,j,rstart,rend,bs;
1355: PetscBool isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1356: Mat Ad = NULL, adj;
1357: IS ispart,isnumb,*is;
1358: PetscErrorCode ierr;
1363: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1365: /* Get prefix, row distribution, and block size */
1366: MatGetOptionsPrefix(A,&prefix);
1367: MatGetOwnershipRange(A,&rstart,&rend);
1368: MatGetBlockSize(A,&bs);
1369: if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1371: /* Get diagonal block from matrix if possible */
1372: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1373: PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);
1374: if (f) {
1375: MatGetDiagonalBlock(A,&Ad);
1376: } else if (size == 1) {
1377: Ad = A;
1378: }
1379: if (Ad) {
1380: PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1381: if (!isbaij) {PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1382: }
1383: if (Ad && n > 1) {
1384: PetscBool match,done;
1385: /* Try to setup a good matrix partitioning if available */
1386: MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1387: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1388: MatPartitioningSetFromOptions(mpart);
1389: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1390: if (!match) {
1391: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1392: }
1393: if (!match) { /* assume a "good" partitioner is available */
1394: PetscInt na;
1395: const PetscInt *ia,*ja;
1396: MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1397: if (done) {
1398: /* Build adjacency matrix by hand. Unfortunately a call to
1399: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1400: remove the block-aij structure and we cannot expect
1401: MatPartitioning to split vertices as we need */
1402: PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1403: const PetscInt *row;
1404: nnz = 0;
1405: for (i=0; i<na; i++) { /* count number of nonzeros */
1406: len = ia[i+1] - ia[i];
1407: row = ja + ia[i];
1408: for (j=0; j<len; j++) {
1409: if (row[j] == i) { /* don't count diagonal */
1410: len--; break;
1411: }
1412: }
1413: nnz += len;
1414: }
1415: PetscMalloc1(na+1,&iia);
1416: PetscMalloc1(nnz,&jja);
1417: nnz = 0;
1418: iia[0] = 0;
1419: for (i=0; i<na; i++) { /* fill adjacency */
1420: cnt = 0;
1421: len = ia[i+1] - ia[i];
1422: row = ja + ia[i];
1423: for (j=0; j<len; j++) {
1424: if (row[j] != i) { /* if not diagonal */
1425: jja[nnz+cnt++] = row[j];
1426: }
1427: }
1428: nnz += cnt;
1429: iia[i+1] = nnz;
1430: }
1431: /* Partitioning of the adjacency matrix */
1432: MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1433: MatPartitioningSetAdjacency(mpart,adj);
1434: MatPartitioningSetNParts(mpart,n);
1435: MatPartitioningApply(mpart,&ispart);
1436: ISPartitioningToNumbering(ispart,&isnumb);
1437: MatDestroy(&adj);
1438: foundpart = PETSC_TRUE;
1439: }
1440: MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1441: }
1442: MatPartitioningDestroy(&mpart);
1443: }
1445: PetscMalloc1(n,&is);
1446: *outis = is;
1448: if (!foundpart) {
1450: /* Partitioning by contiguous chunks of rows */
1452: PetscInt mbs = (rend-rstart)/bs;
1453: PetscInt start = rstart;
1454: for (i=0; i<n; i++) {
1455: PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1456: ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1457: start += count;
1458: }
1460: } else {
1462: /* Partitioning by adjacency of diagonal block */
1464: const PetscInt *numbering;
1465: PetscInt *count,nidx,*indices,*newidx,start=0;
1466: /* Get node count in each partition */
1467: PetscMalloc1(n,&count);
1468: ISPartitioningCount(ispart,n,count);
1469: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1470: for (i=0; i<n; i++) count[i] *= bs;
1471: }
1472: /* Build indices from node numbering */
1473: ISGetLocalSize(isnumb,&nidx);
1474: PetscMalloc1(nidx,&indices);
1475: for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1476: ISGetIndices(isnumb,&numbering);
1477: PetscSortIntWithPermutation(nidx,numbering,indices);
1478: ISRestoreIndices(isnumb,&numbering);
1479: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1480: PetscMalloc1(nidx*bs,&newidx);
1481: for (i=0; i<nidx; i++) {
1482: for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1483: }
1484: PetscFree(indices);
1485: nidx *= bs;
1486: indices = newidx;
1487: }
1488: /* Shift to get global indices */
1489: for (i=0; i<nidx; i++) indices[i] += rstart;
1491: /* Build the index sets for each block */
1492: for (i=0; i<n; i++) {
1493: ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1494: ISSort(is[i]);
1495: start += count[i];
1496: }
1498: PetscFree(count);
1499: PetscFree(indices);
1500: ISDestroy(&isnumb);
1501: ISDestroy(&ispart);
1503: }
1504: return(0);
1505: }
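
A usage sketch (illustrative only, not part of asm.c): letting PCASMCreateSubdomains() split the locally owned rows of a Mat A into four nonoverlapping blocks and handing them to a PCASM preconditioner pc; PCASM then adds the overlap during PCSetUp().

    IS       *is;
    PetscInt nloc = 4;

    PCASMCreateSubdomains(A,nloc,&is);
    PCASMSetLocalSubdomains(pc,nloc,is,NULL);
    PCASMDestroySubdomains(nloc,is,NULL);   /* safe once the subdomains have been set, see PCASMDestroySubdomains() below */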
1509: /*@C
1510: PCASMDestroySubdomains - Destroys the index sets created with
1511: PCASMCreateSubdomains(). Should be called after setting subdomains
1512: with PCASMSetLocalSubdomains().
1514: Collective
1516: Input Parameters:
1517: + n - the number of index sets
1518: . is - the array of index sets
1519: - is_local - the array of local index sets, can be NULL
1521: Level: advanced
1523: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1525: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1526: @*/
1527: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1528: {
1529: PetscInt i;
1533: if (n <= 0) return(0);
1534: if (is) {
1536: for (i=0; i<n; i++) { ISDestroy(&is[i]); }
1537: PetscFree(is);
1538: }
1539: if (is_local) {
1541: for (i=0; i<n; i++) { ISDestroy(&is_local[i]); }
1542: PetscFree(is_local);
1543: }
1544: return(0);
1545: }
1549: /*@
1550: PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1551: preconditioner for a two-dimensional problem on a regular grid.
1553: Not Collective
1555: Input Parameters:
1556: + m, n - the number of mesh points in the x and y directions
1557: . M, N - the number of subdomains in the x and y directions
1558: . dof - degrees of freedom per node
1559: - overlap - overlap in mesh lines
1561: Output Parameters:
1562: + Nsub - the number of subdomains created
1563: . is - array of index sets defining overlapping (if overlap > 0) subdomains
1564: - is_local - array of index sets defining non-overlapping subdomains
1566: Note:
1567: Presently PCASMCreateSubdomains2D() is valid only for sequential
1568: preconditioners. More general related routines are
1569: PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1571: Level: advanced
1573: .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1575: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1576: PCASMSetOverlap()
1577: @*/
1578: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1579: {
1580: PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1582: PetscInt nidx,*idx,loc,ii,jj,count;
1585: if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1587: *Nsub = N*M;
1588: PetscMalloc1(*Nsub,is);
1589: PetscMalloc1(*Nsub,is_local);
1590: ystart = 0;
1591: loc_outer = 0;
1592: for (i=0; i<N; i++) {
1593: height = n/N + ((n % N) > i); /* height of subdomain */
1594: if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1595: yleft = ystart - overlap; if (yleft < 0) yleft = 0;
1596: yright = ystart + height + overlap; if (yright > n) yright = n;
1597: xstart = 0;
1598: for (j=0; j<M; j++) {
1599: width = m/M + ((m % M) > j); /* width of subdomain */
1600: if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1601: xleft = xstart - overlap; if (xleft < 0) xleft = 0;
1602: xright = xstart + width + overlap; if (xright > m) xright = m;
1603: nidx = (xright - xleft)*(yright - yleft);
1604: PetscMalloc1(nidx,&idx);
1605: loc = 0;
1606: for (ii=yleft; ii<yright; ii++) {
1607: count = m*ii + xleft;
1608: for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1609: }
1610: ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1611: if (overlap == 0) {
1612: PetscObjectReference((PetscObject)(*is)[loc_outer]);
1614: (*is_local)[loc_outer] = (*is)[loc_outer];
1615: } else {
1616: for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1617: for (jj=xstart; jj<xstart+width; jj++) {
1618: idx[loc++] = m*ii + jj;
1619: }
1620: }
1621: ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1622: }
1623: PetscFree(idx);
1624: xstart += width;
1625: loc_outer++;
1626: }
1627: ystart += height;
1628: }
1629: for (i=0; i<*Nsub; i++) { ISSort((*is)[i]); }
1630: return(0);
1631: }
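
A usage sketch (illustrative only, not part of asm.c): building a 4 x 4 array of subdomains with one mesh line of overlap for a sequential 64 x 64 grid problem with one degree of freedom per node, then attaching them to a PCASM preconditioner pc; the mesh and block counts are placeholders.

    IS       *is,*is_local;
    PetscInt Nsub;

    PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
    PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
    PCASMDestroySubdomains(Nsub,is,is_local);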
1635: /*@C
1636: PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1637: only) for the additive Schwarz preconditioner.
1639: Not Collective
1641: Input Parameter:
1642: . pc - the preconditioner context
1644: Output Parameters:
1645: + n - the number of subdomains for this processor (default value = 1)
1646: . is - the index sets that define the subdomains for this processor
1647: - is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1650: Notes:
1651: The IS numbering is in the parallel, global numbering of the vector.
1653: Level: advanced
1655: .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1657: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1658: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1659: @*/
1660: PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1661: {
1662: PC_ASM *osm;
1664: PetscBool match;
1670: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1671: if (!match) {
1672: if (n) *n = 0;
1673: if (is) *is = NULL;
1674: } else {
1675: osm = (PC_ASM*)pc->data;
1676: if (n) *n = osm->n_local_true;
1677: if (is) *is = osm->is;
1678: if (is_local) *is_local = osm->is_local;
1679: }
1680: return(0);
1681: }
1685: /*@C
1686: PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1687: only) for the additive Schwarz preconditioner.
1689: Not Collective
1691: Input Parameter:
1692: . pc - the preconditioner context
1694: Output Parameters:
1695: + n - the number of matrices for this processor (default value = 1)
1696: - mat - the matrices
1699: Level: advanced
1701: Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1703: Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1705: .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1707: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1708: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1709: @*/
1710: PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1711: {
1712: PC_ASM *osm;
1714: PetscBool match;
1720: if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1721: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1722: if (!match) {
1723: if (n) *n = 0;
1724: if (mat) *mat = NULL;
1725: } else {
1726: osm = (PC_ASM*)pc->data;
1727: if (n) *n = osm->n_local_true;
1728: if (mat) *mat = osm->pmat;
1729: }
1730: return(0);
1731: }
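
A usage sketch (illustrative only, not part of asm.c): inspecting the local submatrices after setup but before the block solves, assuming a KSP ksp whose PC pc is of type PCASM; the MatSetOption() call is just an example of a harmless per-block adjustment.

    Mat      *submat;
    PetscInt i,nmat;

    KSPSetUp(ksp);   /* triggers PCSetUp(), which extracts the submatrices */
    PCASMGetLocalSubmatrices(pc,&nmat,&submat);
    for (i=0; i<nmat; i++) {
      MatSetOption(submat[i],MAT_SYMMETRIC,PETSC_TRUE);
    }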
1735: /*@
1736: PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1737: Logically Collective
1739: Input Parameter:
1740: + pc - the preconditioner
1741: - flg - boolean indicating whether to use subdomains defined by the DM
1743: Options Database Key:
1744: . -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1746: Level: intermediate
1748: Notes:
1749: PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1750: so setting either of the first two effectively turns the latter off.
1752: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1754: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1755: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1756: @*/
1757: PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg)
1758: {
1759: PC_ASM *osm = (PC_ASM*)pc->data;
1761: PetscBool match;
1766: if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1767: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1768: if (match) {
1769: osm->dm_subdomains = flg;
1770: }
1771: return(0);
1772: }
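
A usage sketch (illustrative only, not part of asm.c): asking PCASM to take its subdomains from an attached DM, assuming a KSP ksp and a DM dm (for example from DMDACreate2d()) already exist; equivalent to the option -pc_asm_dm_subdomains.

    PC pc;

    KSPSetDM(ksp,dm);
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    PCASMSetDMSubdomains(pc,PETSC_TRUE);   /* PCSetUp_ASM() then calls DMCreateDomainDecomposition() */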
1776: /*@
1777: PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1778: Not Collective
1780: Input Parameter:
1781: . pc - the preconditioner
1783: Output Parameter:
1784: . flg - boolean indicating whether to use subdomains defined by the DM
1786: Level: intermediate
1788: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1790: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1791: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1792: @*/
1793: PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1794: {
1795: PC_ASM *osm = (PC_ASM*)pc->data;
1797: PetscBool match;
1802: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1803: if (match) {
1804: if (flg) *flg = osm->dm_subdomains;
1805: }
1806: return(0);
1807: }