Actual source code: asm.c
1: /*
2: This file defines an additive Schwarz preconditioner for any Mat implementation.
4: Note that each processor may have any number of subdomains. But in order to
5: deal easily with the VecScatter(), we treat each processor as if it has the
6: same number of subdomains.
8: n - total number of true subdomains on all processors
9: n_local_true - actual number of subdomains on this processor
10: n_local = maximum over all processors of n_local_true
11: */
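/*
   Illustrative example of the bookkeeping above (not part of the original source):
   with three MPI ranks owning 2, 1, and 3 true subdomains respectively, n = 6,
   n_local_true is 2, 1, and 3 on the respective ranks, and n_local = 3.
*/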
13: #include <petsc/private/pcasmimpl.h>
15: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
16: {
17: PC_ASM *osm = (PC_ASM*)pc->data;
18: PetscMPIInt rank;
19: PetscInt i,bsz;
20: PetscBool iascii,isstring;
21: PetscViewer sviewer;
22: PetscViewerFormat format;
23: const char *prefix;
25: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
26: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
27: if (iascii) {
28: char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
29: if (osm->overlap >= 0) PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);
30: if (osm->n > 0) PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);
31: PetscViewerASCIIPrintf(viewer," %s, %s\n",blocks,overlaps);
32: PetscViewerASCIIPrintf(viewer," restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
33: if (osm->dm_subdomains) PetscViewerASCIIPrintf(viewer," Additive Schwarz: using DM to define subdomains\n");
34: if (osm->loctype != PC_COMPOSITE_ADDITIVE) PetscViewerASCIIPrintf(viewer," Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);
35: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
36: PetscViewerGetFormat(viewer,&format);
37: if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) {
38: if (osm->ksp) {
39: PetscViewerASCIIPrintf(viewer," Local solver information for first block is in the following KSP and PC objects on rank 0:\n");
40: PCGetOptionsPrefix(pc,&prefix);
41: PetscViewerASCIIPrintf(viewer," Use -%sksp_view ::ascii_info_detail to display information for all blocks\n",prefix?prefix:"");
42: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
43: if (rank == 0) {
44: PetscViewerASCIIPushTab(viewer);
45: KSPView(osm->ksp[0],sviewer);
46: PetscViewerASCIIPopTab(viewer);
47: }
48: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
49: }
50: } else {
51: PetscViewerASCIIPushSynchronized(viewer);
52: PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
53: PetscViewerFlush(viewer);
54: PetscViewerASCIIPrintf(viewer," Local solver information for each block is in the following KSP and PC objects:\n");
55: PetscViewerASCIIPushTab(viewer);
56: PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
57: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
58: for (i=0; i<osm->n_local_true; i++) {
59: ISGetLocalSize(osm->is[i],&bsz);
60: PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
61: KSPView(osm->ksp[i],sviewer);
62: PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
63: }
64: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
65: PetscViewerASCIIPopTab(viewer);
66: PetscViewerFlush(viewer);
67: PetscViewerASCIIPopSynchronized(viewer);
68: }
69: } else if (isstring) {
70: PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
71: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
72: if (osm->ksp) KSPView(osm->ksp[0],sviewer);
73: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
74: }
75: return 0;
76: }
78: static PetscErrorCode PCASMPrintSubdomains(PC pc)
79: {
80: PC_ASM *osm = (PC_ASM*)pc->data;
81: const char *prefix;
82: char fname[PETSC_MAX_PATH_LEN+1];
83: PetscViewer viewer, sviewer;
84: char *s;
85: PetscInt i,j,nidx;
86: const PetscInt *idx;
87: PetscMPIInt rank, size;
89: MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
90: MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
91: PCGetOptionsPrefix(pc,&prefix);
92: PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL);
93: if (fname[0] == 0) PetscStrcpy(fname,"stdout");
94: PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
95: for (i=0; i<osm->n_local; i++) {
96: if (i < osm->n_local_true) {
97: ISGetLocalSize(osm->is[i],&nidx);
98: ISGetIndices(osm->is[i],&idx);
99: /* Print to a string viewer; allow 16 characters per index (digits plus separator) plus 512 characters for the header. */
100: #define len 16*(nidx+1)+512
101: PetscMalloc1(len,&s);
102: PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);
103: #undef len
104: PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);
105: for (j=0; j<nidx; j++) {
106: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
107: }
108: ISRestoreIndices(osm->is[i],&idx);
109: PetscViewerStringSPrintf(sviewer,"\n");
110: PetscViewerDestroy(&sviewer);
111: PetscViewerASCIIPushSynchronized(viewer);
112: PetscViewerASCIISynchronizedPrintf(viewer, s);
113: PetscViewerFlush(viewer);
114: PetscViewerASCIIPopSynchronized(viewer);
115: PetscFree(s);
116: if (osm->is_local) {
117: /* Print to a string viewer; allow 16 characters per index (digits plus separator) plus 512 characters for the header. */
118: #define len 16*(nidx+1)+512
119: PetscMalloc1(len, &s);
120: PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);
121: #undef len
122: PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);
123: ISGetLocalSize(osm->is_local[i],&nidx);
124: ISGetIndices(osm->is_local[i],&idx);
125: for (j=0; j<nidx; j++) {
126: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
127: }
128: ISRestoreIndices(osm->is_local[i],&idx);
129: PetscViewerStringSPrintf(sviewer,"\n");
130: PetscViewerDestroy(&sviewer);
131: PetscViewerASCIIPushSynchronized(viewer);
132: PetscViewerASCIISynchronizedPrintf(viewer, s);
133: PetscViewerFlush(viewer);
134: PetscViewerASCIIPopSynchronized(viewer);
135: PetscFree(s);
136: }
137: } else {
138: /* Participate in collective viewer calls. */
139: PetscViewerASCIIPushSynchronized(viewer);
140: PetscViewerFlush(viewer);
141: PetscViewerASCIIPopSynchronized(viewer);
142: /* Assume either all ranks have is_local or none do. */
143: if (osm->is_local) {
144: PetscViewerASCIIPushSynchronized(viewer);
145: PetscViewerFlush(viewer);
146: PetscViewerASCIIPopSynchronized(viewer);
147: }
148: }
149: }
150: PetscViewerFlush(viewer);
151: PetscViewerDestroy(&viewer);
152: return 0;
153: }
155: static PetscErrorCode PCSetUp_ASM(PC pc)
156: {
157: PC_ASM *osm = (PC_ASM*)pc->data;
158: PetscBool flg;
159: PetscInt i,m,m_local;
160: MatReuse scall = MAT_REUSE_MATRIX;
161: IS isl;
162: KSP ksp;
163: PC subpc;
164: const char *prefix,*pprefix;
165: Vec vec;
166: DM *domain_dm = NULL;
168: if (!pc->setupcalled) {
169: PetscInt m;
171: /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
172: if (osm->n_local_true == PETSC_DECIDE) {
173: /* no subdomains given */
174: /* try pc->dm first, if allowed */
175: if (osm->dm_subdomains && pc->dm) {
176: PetscInt num_domains, d;
177: char **domain_names;
178: IS *inner_domain_is, *outer_domain_is;
179: DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);
180: osm->overlap = -1; /* We do not want to increase the overlap of the IS.
181: A future improvement of this code might allow one to use
182: DM-defined subdomains and also increase the overlap,
183: but that is not currently supported */
184: if (num_domains) {
185: PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
186: }
187: for (d = 0; d < num_domains; ++d) {
188: if (domain_names) PetscFree(domain_names[d]);
189: if (inner_domain_is) ISDestroy(&inner_domain_is[d]);
190: if (outer_domain_is) ISDestroy(&outer_domain_is[d]);
191: }
192: PetscFree(domain_names);
193: PetscFree(inner_domain_is);
194: PetscFree(outer_domain_is);
195: }
196: if (osm->n_local_true == PETSC_DECIDE) {
197: /* still no subdomains; use one subdomain per processor */
198: osm->n_local_true = 1;
199: }
200: }
201: { /* determine the global and max number of subdomains */
202: struct {PetscInt max,sum;} inwork,outwork;
203: PetscMPIInt size;
205: inwork.max = osm->n_local_true;
206: inwork.sum = osm->n_local_true;
207: MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));
208: osm->n_local = outwork.max;
209: osm->n = outwork.sum;
211: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
212: if (outwork.max == 1 && outwork.sum == size) {
213: /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
214: MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);
215: }
216: }
217: if (!osm->is) { /* create the index sets */
218: PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
219: }
220: if (osm->n_local_true > 1 && !osm->is_local) {
221: PetscMalloc1(osm->n_local_true,&osm->is_local);
222: for (i=0; i<osm->n_local_true; i++) {
223: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
224: ISDuplicate(osm->is[i],&osm->is_local[i]);
225: ISCopy(osm->is[i],osm->is_local[i]);
226: } else {
227: PetscObjectReference((PetscObject)osm->is[i]);
228: osm->is_local[i] = osm->is[i];
229: }
230: }
231: }
232: PCGetOptionsPrefix(pc,&prefix);
233: if (osm->overlap > 0) {
234: /* Extend the "overlapping" regions by a number of steps */
235: MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
236: }
237: if (osm->sort_indices) {
238: for (i=0; i<osm->n_local_true; i++) {
239: ISSort(osm->is[i]);
240: if (osm->is_local) {
241: ISSort(osm->is_local[i]);
242: }
243: }
244: }
245: flg = PETSC_FALSE;
246: PetscOptionsHasName(NULL,prefix,"-pc_asm_print_subdomains",&flg);
247: if (flg) PCASMPrintSubdomains(pc);
248: if (!osm->ksp) {
249: /* Create the local solvers */
250: PetscMalloc1(osm->n_local_true,&osm->ksp);
251: if (domain_dm) {
252: PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
253: }
254: for (i=0; i<osm->n_local_true; i++) {
255: KSPCreate(PETSC_COMM_SELF,&ksp);
256: KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
257: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
258: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
259: KSPSetType(ksp,KSPPREONLY);
260: KSPGetPC(ksp,&subpc);
261: PCGetOptionsPrefix(pc,&prefix);
262: KSPSetOptionsPrefix(ksp,prefix);
263: KSPAppendOptionsPrefix(ksp,"sub_");
264: if (domain_dm) {
265: KSPSetDM(ksp, domain_dm[i]);
266: KSPSetDMActive(ksp, PETSC_FALSE);
267: DMDestroy(&domain_dm[i]);
268: }
269: osm->ksp[i] = ksp;
270: }
271: if (domain_dm) {
272: PetscFree(domain_dm);
273: }
274: }
276: ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);
277: ISSortRemoveDups(osm->lis);
278: ISGetLocalSize(osm->lis, &m);
280: scall = MAT_INITIAL_MATRIX;
281: } else {
282: /*
283: Destroy the blocks from the previous iteration
284: */
285: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
286: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
287: scall = MAT_INITIAL_MATRIX;
288: }
289: }
291: /* Destroy previous submatrices of a different type than pc->pmat since MAT_REUSE_MATRIX won't work in that case */
292: if ((scall == MAT_REUSE_MATRIX) && osm->sub_mat_type) {
293: if (osm->n_local_true > 0) {
294: MatDestroySubMatrices(osm->n_local_true,&osm->pmat);
295: }
296: scall = MAT_INITIAL_MATRIX;
297: }
299: /*
300: Extract out the submatrices
301: */
302: MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
303: if (scall == MAT_INITIAL_MATRIX) {
304: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
305: for (i=0; i<osm->n_local_true; i++) {
306: PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
307: PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
308: }
309: }
311: /* Convert the types of the submatrices (if need be) */
312: if (osm->sub_mat_type) {
313: for (i=0; i<osm->n_local_true; i++) {
314: MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));
315: }
316: }
318: if (!pc->setupcalled) {
319: VecType vtype;
321: /* Create the local work vectors (from the local matrices) and scatter contexts */
322: MatCreateVecs(pc->pmat,&vec,NULL);
325: if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
326: PetscMalloc1(osm->n_local_true,&osm->lprolongation);
327: }
328: PetscMalloc1(osm->n_local_true,&osm->lrestriction);
329: PetscMalloc1(osm->n_local_true,&osm->x);
330: PetscMalloc1(osm->n_local_true,&osm->y);
332: ISGetLocalSize(osm->lis,&m);
333: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
334: MatGetVecType(osm->pmat[0],&vtype);
335: VecCreate(PETSC_COMM_SELF,&osm->lx);
336: VecSetSizes(osm->lx,m,m);
337: VecSetType(osm->lx,vtype);
338: VecDuplicate(osm->lx, &osm->ly);
339: VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);
340: ISDestroy(&isl);
342: for (i=0; i<osm->n_local_true; ++i) {
343: ISLocalToGlobalMapping ltog;
344: IS isll;
345: const PetscInt *idx_is;
346: PetscInt *idx_lis,nout;
348: ISGetLocalSize(osm->is[i],&m);
349: MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);
350: VecDuplicate(osm->x[i],&osm->y[i]);
352: /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
353: ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);
354: ISGetLocalSize(osm->is[i],&m);
355: ISGetIndices(osm->is[i], &idx_is);
356: PetscMalloc1(m,&idx_lis);
357: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);
359: ISRestoreIndices(osm->is[i], &idx_is);
360: ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);
361: ISLocalToGlobalMappingDestroy(&ltog);
362: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
363: VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);
364: ISDestroy(&isll);
365: ISDestroy(&isl);
366: if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
367: ISLocalToGlobalMapping ltog;
368: IS isll,isll_local;
369: const PetscInt *idx_local;
370: PetscInt *idx1, *idx2, nout;
372: ISGetLocalSize(osm->is_local[i],&m_local);
373: ISGetIndices(osm->is_local[i], &idx_local);
375: ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
376: PetscMalloc1(m_local,&idx1);
377: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);
378: ISLocalToGlobalMappingDestroy(&ltog);
380: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);
382: ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);
383: PetscMalloc1(m_local,&idx2);
384: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);
385: ISLocalToGlobalMappingDestroy(&ltog);
387: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);
389: ISRestoreIndices(osm->is_local[i], &idx_local);
390: VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);
392: ISDestroy(&isll);
393: ISDestroy(&isll_local);
394: }
395: }
396: VecDestroy(&vec);
397: }
399: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
400: IS *cis;
401: PetscInt c;
403: PetscMalloc1(osm->n_local_true, &cis);
404: for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
405: MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);
406: PetscFree(cis);
407: }
409: /* Return control to the user so that the submatrices can be modified (e.g., to apply
410: different boundary conditions for the submatrices than for the global problem) */
411: PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);
413: /*
414: Loop over subdomains putting them into local ksp
415: */
416: KSPGetOptionsPrefix(osm->ksp[0],&prefix);
417: for (i=0; i<osm->n_local_true; i++) {
418: KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
419: MatSetOptionsPrefix(osm->pmat[i],prefix);
420: if (!pc->setupcalled) {
421: KSPSetFromOptions(osm->ksp[i]);
422: }
423: }
424: return 0;
425: }
427: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
428: {
429: PC_ASM *osm = (PC_ASM*)pc->data;
430: PetscInt i;
431: KSPConvergedReason reason;
433: for (i=0; i<osm->n_local_true; i++) {
434: KSPSetUp(osm->ksp[i]);
435: KSPGetConvergedReason(osm->ksp[i],&reason);
436: if (reason == KSP_DIVERGED_PC_FAILED) {
437: pc->failedreason = PC_SUBPC_ERROR;
438: }
439: }
440: return 0;
441: }
443: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
444: {
445: PC_ASM *osm = (PC_ASM*)pc->data;
446: PetscInt i,n_local_true = osm->n_local_true;
447: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
449: /*
450: support for limiting the restriction or interpolation to only local
451: subdomain values (leaving the other values 0).
452: */
453: if (!(osm->type & PC_ASM_RESTRICT)) {
454: forward = SCATTER_FORWARD_LOCAL;
455: /* have to zero the work RHS since scatter may leave some slots empty */
456: VecSet(osm->lx, 0.0);
457: }
458: if (!(osm->type & PC_ASM_INTERPOLATE)) {
459: reverse = SCATTER_REVERSE_LOCAL;
460: }
462: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
463: /* zero the global and the local solutions */
464: VecSet(y, 0.0);
465: VecSet(osm->ly, 0.0);
467: /* copy the global RHS to local RHS including the ghost nodes */
468: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
469: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
471: /* restrict local RHS to the overlapping 0-block RHS */
472: VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
473: VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
475: /* do the local solves */
476: for (i = 0; i < n_local_true; ++i) {
478: /* solve the overlapping i-block */
479: PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0);
480: KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);
481: KSPCheckSolve(osm->ksp[i], pc, osm->y[i]);
482: PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0);
484: if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
485: VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
486: VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
487: } else { /* interpolate the overlapping i-block solution to the local solution */
488: VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
489: VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
490: }
492: if (i < n_local_true-1) {
493: /* restrict local RHS to the overlapping (i+1)-block RHS */
494: VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
495: VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
497: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
498: /* update the overlapping (i+1)-block RHS using the current local solution */
499: MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);
500: VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]);
501: }
502: }
503: }
504: /* add the local solution to the global solution including the ghost nodes */
505: VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
506: VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
507: } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
508: return 0;
509: }
511: static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
512: {
513: PC_ASM *osm = (PC_ASM*)pc->data;
514: Mat Z,W;
515: Vec x;
516: PetscInt i,m,N;
517: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
520: /*
521: support for limiting the restriction or interpolation to only local
522: subdomain values (leaving the other values 0).
523: */
524: if (!(osm->type & PC_ASM_RESTRICT)) {
525: forward = SCATTER_FORWARD_LOCAL;
526: /* have to zero the work RHS since scatter may leave some slots empty */
527: VecSet(osm->lx, 0.0);
528: }
529: if (!(osm->type & PC_ASM_INTERPOLATE)) {
530: reverse = SCATTER_REVERSE_LOCAL;
531: }
532: VecGetLocalSize(osm->x[0], &m);
533: MatGetSize(X, NULL, &N);
534: MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z);
535: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
536: /* zero the global and the local solutions */
537: MatZeroEntries(Y);
538: VecSet(osm->ly, 0.0);
540: for (i = 0; i < N; ++i) {
541: MatDenseGetColumnVecRead(X, i, &x);
542: /* copy the global RHS to local RHS including the ghost nodes */
543: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
544: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
545: MatDenseRestoreColumnVecRead(X, i, &x);
547: MatDenseGetColumnVecWrite(Z, i, &x);
548: /* restrict local RHS to the overlapping 0-block RHS */
549: VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward);
550: VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward);
551: MatDenseRestoreColumnVecWrite(Z, i, &x);
552: }
553: MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W);
554: /* solve the overlapping 0-block */
555: PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0);
556: KSPMatSolve(osm->ksp[0], Z, W);
557: KSPCheckSolve(osm->ksp[0], pc, NULL);
558: PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0);
559: MatDestroy(&Z);
561: for (i = 0; i < N; ++i) {
562: VecSet(osm->ly, 0.0);
563: MatDenseGetColumnVecRead(W, i, &x);
564: if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
565: VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward);
566: VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward);
567: } else { /* interpolate the overlapping 0-block solution to the local solution */
568: VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse);
569: VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse);
570: }
571: MatDenseRestoreColumnVecRead(W, i, &x);
573: MatDenseGetColumnVecWrite(Y, i, &x);
574: /* add the local solution to the global solution including the ghost nodes */
575: VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse);
576: VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse);
577: MatDenseRestoreColumnVecWrite(Y, i, &x);
578: }
579: MatDestroy(&W);
580: } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
581: return 0;
582: }
584: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
585: {
586: PC_ASM *osm = (PC_ASM*)pc->data;
587: PetscInt i,n_local_true = osm->n_local_true;
588: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
590: /*
591: Support for limiting the restriction or interpolation to only local
592: subdomain values (leaving the other values 0).
594: Note: these are reversed from the PCApply_ASM() because we are applying the
595: transpose of the three terms
596: */
598: if (!(osm->type & PC_ASM_INTERPOLATE)) {
599: forward = SCATTER_FORWARD_LOCAL;
600: /* have to zero the work RHS since scatter may leave some slots empty */
601: VecSet(osm->lx, 0.0);
602: }
603: if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
605: /* zero the global and the local solutions */
606: VecSet(y, 0.0);
607: VecSet(osm->ly, 0.0);
609: /* Copy the global RHS to local RHS including the ghost nodes */
610: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
611: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
613: /* Restrict local RHS to the overlapping 0-block RHS */
614: VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
615: VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
617: /* do the local solves */
618: for (i = 0; i < n_local_true; ++i) {
620: /* solve the overlapping i-block */
621: PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
622: KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);
623: KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);
624: PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
626: if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
627: VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
628: VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
629: } else { /* interpolate the overlapping i-block solution to the local solution */
630: VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
631: VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
632: }
634: if (i < n_local_true-1) {
635: /* Restrict local RHS to the overlapping (i+1)-block RHS */
636: VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
637: VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
638: }
639: }
640: /* Add the local solution to the global solution including the ghost nodes */
641: VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
642: VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
644: return 0;
646: }
648: static PetscErrorCode PCReset_ASM(PC pc)
649: {
650: PC_ASM *osm = (PC_ASM*)pc->data;
651: PetscInt i;
653: if (osm->ksp) {
654: for (i=0; i<osm->n_local_true; i++) {
655: KSPReset(osm->ksp[i]);
656: }
657: }
658: if (osm->pmat) {
659: if (osm->n_local_true > 0) {
660: MatDestroySubMatrices(osm->n_local_true,&osm->pmat);
661: }
662: }
663: if (osm->lrestriction) {
664: VecScatterDestroy(&osm->restriction);
665: for (i=0; i<osm->n_local_true; i++) {
666: VecScatterDestroy(&osm->lrestriction[i]);
667: if (osm->lprolongation) VecScatterDestroy(&osm->lprolongation[i]);
668: VecDestroy(&osm->x[i]);
669: VecDestroy(&osm->y[i]);
670: }
671: PetscFree(osm->lrestriction);
672: if (osm->lprolongation) PetscFree(osm->lprolongation);
673: PetscFree(osm->x);
674: PetscFree(osm->y);
676: }
677: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
678: ISDestroy(&osm->lis);
679: VecDestroy(&osm->lx);
680: VecDestroy(&osm->ly);
681: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
682: MatDestroyMatrices(osm->n_local_true, &osm->lmats);
683: }
685: PetscFree(osm->sub_mat_type);
687: osm->is = NULL;
688: osm->is_local = NULL;
689: return 0;
690: }
692: static PetscErrorCode PCDestroy_ASM(PC pc)
693: {
694: PC_ASM *osm = (PC_ASM*)pc->data;
695: PetscInt i;
697: PCReset_ASM(pc);
698: if (osm->ksp) {
699: for (i=0; i<osm->n_local_true; i++) {
700: KSPDestroy(&osm->ksp[i]);
701: }
702: PetscFree(osm->ksp);
703: }
704: PetscFree(pc->data);
706: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",NULL);
707: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",NULL);
708: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",NULL);
709: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",NULL);
710: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",NULL);
711: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",NULL);
712: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",NULL);
713: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",NULL);
714: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",NULL);
715: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",NULL);
716: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",NULL);
717: return 0;
718: }
720: static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
721: {
722: PC_ASM *osm = (PC_ASM*)pc->data;
723: PetscInt blocks,ovl;
724: PetscBool flg;
725: PCASMType asmtype;
726: PCCompositeType loctype;
727: char sub_mat_type[256];
729: PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");
730: PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
731: PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
732: if (flg) {
733: PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
734: osm->dm_subdomains = PETSC_FALSE;
735: }
736: PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);
737: if (flg) {
738: PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);
739: osm->dm_subdomains = PETSC_FALSE;
740: }
741: PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
742: if (flg) {
743: PCASMSetOverlap(pc,ovl);
744: osm->dm_subdomains = PETSC_FALSE;
745: }
746: flg = PETSC_FALSE;
747: PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
748: if (flg) PCASMSetType(pc,asmtype);
749: flg = PETSC_FALSE;
750: PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);
751: if (flg) PCASMSetLocalType(pc,loctype);
752: PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);
753: if (flg) {
754: PCASMSetSubMatType(pc,sub_mat_type);
755: }
756: PetscOptionsTail();
757: return 0;
758: }
760: /*------------------------------------------------------------------------------------*/
762: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
763: {
764: PC_ASM *osm = (PC_ASM*)pc->data;
765: PetscInt i;
770: if (!pc->setupcalled) {
771: if (is) {
772: for (i=0; i<n; i++) PetscObjectReference((PetscObject)is[i]);
773: }
774: if (is_local) {
775: for (i=0; i<n; i++) PetscObjectReference((PetscObject)is_local[i]);
776: }
777: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
779: osm->n_local_true = n;
780: osm->is = NULL;
781: osm->is_local = NULL;
782: if (is) {
783: PetscMalloc1(n,&osm->is);
784: for (i=0; i<n; i++) osm->is[i] = is[i];
785: /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
786: osm->overlap = -1;
787: }
788: if (is_local) {
789: PetscMalloc1(n,&osm->is_local);
790: for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
791: if (!is) {
792: PetscMalloc1(osm->n_local_true,&osm->is);
793: for (i=0; i<osm->n_local_true; i++) {
794: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
795: ISDuplicate(osm->is_local[i],&osm->is[i]);
796: ISCopy(osm->is_local[i],osm->is[i]);
797: } else {
798: PetscObjectReference((PetscObject)osm->is_local[i]);
799: osm->is[i] = osm->is_local[i];
800: }
801: }
802: }
803: }
804: }
805: return 0;
806: }
808: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
809: {
810: PC_ASM *osm = (PC_ASM*)pc->data;
811: PetscMPIInt rank,size;
812: PetscInt n;
817: /*
818: Split the subdomains equally among all processors
819: */
820: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
821: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
822: n = N/size + ((N % size) > rank);
825: if (!pc->setupcalled) {
826: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
828: osm->n_local_true = n;
829: osm->is = NULL;
830: osm->is_local = NULL;
831: }
832: return 0;
833: }
835: static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
836: {
837: PC_ASM *osm = (PC_ASM*)pc->data;
841: if (!pc->setupcalled) osm->overlap = ovl;
842: return 0;
843: }
845: static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
846: {
847: PC_ASM *osm = (PC_ASM*)pc->data;
849: osm->type = type;
850: osm->type_set = PETSC_TRUE;
851: return 0;
852: }
854: static PetscErrorCode PCASMGetType_ASM(PC pc,PCASMType *type)
855: {
856: PC_ASM *osm = (PC_ASM*)pc->data;
858: *type = osm->type;
859: return 0;
860: }
862: static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
863: {
864: PC_ASM *osm = (PC_ASM *) pc->data;
867: osm->loctype = type;
868: return 0;
869: }
871: static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
872: {
873: PC_ASM *osm = (PC_ASM *) pc->data;
875: *type = osm->loctype;
876: return 0;
877: }
879: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
880: {
881: PC_ASM *osm = (PC_ASM*)pc->data;
883: osm->sort_indices = doSort;
884: return 0;
885: }
887: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
888: {
889: PC_ASM *osm = (PC_ASM*)pc->data;
893: if (n_local) *n_local = osm->n_local_true;
894: if (first_local) {
895: MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
896: *first_local -= osm->n_local_true;
897: }
898: if (ksp) *ksp = osm->ksp;
899: return 0;
900: }
902: static PetscErrorCode PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
903: {
904: PC_ASM *osm = (PC_ASM*)pc->data;
908: *sub_mat_type = osm->sub_mat_type;
909: return 0;
910: }
912: static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
913: {
914: PC_ASM *osm = (PC_ASM*)pc->data;
917: PetscFree(osm->sub_mat_type);
918: PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);
919: return 0;
920: }
922: /*@C
923: PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
925: Collective on pc
927: Input Parameters:
928: + pc - the preconditioner context
929: . n - the number of subdomains for this processor (default value = 1)
930: . is - the index set that defines the subdomains for this processor
931: (or NULL for PETSc to determine subdomains)
932: - is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
933: (or NULL to not provide these)
935: Options Database Key:
936: To set the number of local subdomain blocks rather than specify the
937: index sets, use the option
938: . -pc_asm_local_blocks <blks> - Sets local blocks
940: Notes:
941: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
943: By default the ASM preconditioner uses 1 block per processor.
945: Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
947: If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
948: back to form the global solution (this is the standard restricted additive Schwarz method)
950: If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
951: no code to handle that case.
953: Level: advanced
955: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
956: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
957: @*/
958: PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
959: {
961: PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
962: return 0;
963: }
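/*
   Usage sketch (illustrative, not part of asm.c): supplying two user-defined
   overlapping subdomains per rank before the solver is set up. The objects
   A, b, x and the index sets is[0], is[1] are placeholders the application
   must create (the index sets in the global numbering of the vector).

     KSP ksp;
     PC  pc;
     IS  is[2];                                 // e.g. built with ISCreateGeneral()
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCASM);
     PCASMSetLocalSubdomains(pc,2,is,NULL);     // NULL: no separate non-overlapping parts
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
*/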
965: /*@C
966: PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
967: additive Schwarz preconditioner. Either all or no processors in the
968: PC communicator must call this routine, with the same index sets.
970: Collective on pc
972: Input Parameters:
973: + pc - the preconditioner context
974: . N - the number of subdomains for all processors
975: . is - the index sets that define the subdomains for all processors
976: (or NULL to ask PETSc to determine the subdomains)
977: - is_local - the index sets that define the local part of the subdomains for this processor
978: (or NULL to not provide this information)
980: Options Database Key:
981: To set the total number of subdomain blocks rather than specify the
982: index sets, use the option
983: . -pc_asm_blocks <blks> - Sets total blocks
985: Notes:
986: Currently you cannot use this to set the actual subdomains with the argument is or is_local.
988: By default the ASM preconditioner uses 1 block per processor.
990: These index sets cannot be destroyed until after completion of the
991: linear solves for which the ASM preconditioner is being used.
993: Use PCASMSetLocalSubdomains() to set local subdomains.
995: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
997: Level: advanced
999: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1000: PCASMCreateSubdomains2D()
1001: @*/
1002: PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
1003: {
1005: PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
1006: return 0;
1007: }
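/*
   Usage sketch (illustrative, not part of asm.c): requesting 16 subdomain blocks
   in total and letting PETSc distribute them across the ranks, equivalent to the
   option -pc_asm_blocks 16; pc is assumed to already be of type PCASM.

     PCASMSetTotalSubdomains(pc,16,NULL,NULL);  // index sets chosen by PETSc
*/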
1009: /*@
1010: PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1011: additive Schwarz preconditioner. Either all or no processors in the
1012: PC communicator must call this routine.
1014: Logically Collective on pc
1016: Input Parameters:
1017: + pc - the preconditioner context
1018: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1020: Options Database Key:
1021: . -pc_asm_overlap <ovl> - Sets overlap
1023: Notes:
1024: By default the ASM preconditioner uses 1 block per processor. To use
1025: multiple blocks per processor, see PCASMSetTotalSubdomains() and
1026: PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1028: The overlap defaults to 1, so if one desires that no additional
1029: overlap be computed beyond what may have been set with a call to
1030: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1031: must be set to 0. In particular, if one does not explicitly set
1032: the subdomains in an application code, then all overlap would be computed
1033: internally by PETSc, and using an overlap of 0 would result in an ASM
1034: variant that is equivalent to the block Jacobi preconditioner.
1036: The default algorithm used by PETSc to increase overlap is fast but not scalable;
1037: use the option -mat_increase_overlap_scalable when the problem and number of processes are large.
1039: Note that one can define initial index sets with any overlap via
1040: PCASMSetLocalSubdomains(); the routine
1041: PCASMSetOverlap() merely allows PETSc to extend that overlap further
1042: if desired.
1044: Level: intermediate
1046: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1047: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1048: @*/
1049: PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
1050: {
1053: PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
1054: return 0;
1055: }
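/*
   Usage sketch (illustrative, not part of asm.c): growing each PETSc-chosen
   subdomain by two layers of matrix-graph neighbors, equivalent to the option
   -pc_asm_overlap 2; pc is assumed to already be of type PCASM.

     PCASMSetOverlap(pc,2);   // use 0 to keep exactly the given subdomains (block Jacobi-like)
*/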
1057: /*@
1058: PCASMSetType - Sets the type of restriction and interpolation used
1059: for local problems in the additive Schwarz method.
1061: Logically Collective on pc
1063: Input Parameters:
1064: + pc - the preconditioner context
1065: - type - variant of ASM, one of
1066: .vb
1067: PC_ASM_BASIC - full interpolation and restriction
1068: PC_ASM_RESTRICT - full restriction, local processor interpolation
1069: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1070: PC_ASM_NONE - local processor restriction and interpolation
1071: .ve
1073: Options Database Key:
1074: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1076: Notes:
1077: If the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1078: to limit the local processor interpolation.
1080: Level: intermediate
1082: .seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1083: PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1084: @*/
1085: PetscErrorCode PCASMSetType(PC pc,PCASMType type)
1086: {
1089: PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
1090: return 0;
1091: }
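/*
   Usage sketch (illustrative, not part of asm.c): selecting the classical
   (non-restricted) additive Schwarz variant instead of the default
   PC_ASM_RESTRICT, equivalent to -pc_asm_type basic.

     PCASMSetType(pc,PC_ASM_BASIC);
*/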
1093: /*@
1094: PCASMGetType - Gets the type of restriction and interpolation used
1095: for local problems in the additive Schwarz method.
1097: Logically Collective on pc
1099: Input Parameter:
1100: . pc - the preconditioner context
1102: Output Parameter:
1103: . type - variant of ASM, one of
1105: .vb
1106: PC_ASM_BASIC - full interpolation and restriction
1107: PC_ASM_RESTRICT - full restriction, local processor interpolation
1108: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1109: PC_ASM_NONE - local processor restriction and interpolation
1110: .ve
1112: Options Database Key:
1113: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1115: Level: intermediate
1117: .seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1118: PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1119: @*/
1120: PetscErrorCode PCASMGetType(PC pc,PCASMType *type)
1121: {
1123: PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
1124: return 0;
1125: }
1127: /*@
1128: PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1130: Logically Collective on pc
1132: Input Parameters:
1133: + pc - the preconditioner context
1134: - type - type of composition, one of
1135: .vb
1136: PC_COMPOSITE_ADDITIVE - local additive combination
1137: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1138: .ve
1140: Options Database Key:
1141: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1143: Level: intermediate
1145: .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1146: @*/
1147: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1148: {
1151: PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1152: return 0;
1153: }
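/*
   Usage sketch (illustrative, not part of asm.c): composing the local block solves
   multiplicatively (a Gauss-Seidel-like sweep over the blocks owned by each rank)
   instead of the default additive combination, equivalent to
   -pc_asm_local_type multiplicative.

     PCASMSetLocalType(pc,PC_COMPOSITE_MULTIPLICATIVE);
*/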
1155: /*@
1156: PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1158: Logically Collective on pc
1160: Input Parameter:
1161: . pc - the preconditioner context
1163: Output Parameter:
1164: . type - type of composition, one of
1165: .vb
1166: PC_COMPOSITE_ADDITIVE - local additive combination
1167: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1168: .ve
1170: Options Database Key:
1171: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1173: Level: intermediate
1175: .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1176: @*/
1177: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1178: {
1181: PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1182: return 0;
1183: }
1185: /*@
1186: PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1188: Logically Collective on pc
1190: Input Parameters:
1191: + pc - the preconditioner context
1192: - doSort - sort the subdomain indices
1194: Level: intermediate
1196: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1197: PCASMCreateSubdomains2D()
1198: @*/
1199: PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort)
1200: {
1203: PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1204: return 0;
1205: }
1207: /*@C
1208: PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1209: this processor.
1211: Collective on pc iff first_local is requested
1213: Input Parameter:
1214: . pc - the preconditioner context
1216: Output Parameters:
1217: + n_local - the number of blocks on this processor or NULL
1218: . first_local - the global number of the first block on this processor or NULL,
1219: all processors must request or all must pass NULL
1220: - ksp - the array of KSP contexts
1222: Note:
1223: After PCASMGetSubKSP() the array of KSP objects is not to be freed.
1225: You must call KSPSetUp() before calling PCASMGetSubKSP().
1227: Fortran note:
1228: The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1230: Level: advanced
1232: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1233: PCASMCreateSubdomains2D()
1234: @*/
1235: PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1236: {
1238: PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1239: return 0;
1240: }
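/*
   Usage sketch (illustrative, not part of asm.c): retrieving the block solvers
   after KSPSetUp() and configuring each one directly, here switching every block
   to ILU-preconditioned GMRES. The names ksp, nlocal, first, subksp, and subpc are
   placeholders introduced for this example.

     KSP      *subksp;
     PC       subpc;
     PetscInt i,nlocal,first;
     KSPSetUp(ksp);                              // must be called before PCASMGetSubKSP()
     PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
     for (i=0; i<nlocal; i++) {
       KSPSetType(subksp[i],KSPGMRES);
       KSPGetPC(subksp[i],&subpc);
       PCSetType(subpc,PCILU);
     }
*/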
1242: /* -------------------------------------------------------------------------------------*/
1243: /*MC
1244: PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1245: its own KSP object.
1247: Options Database Keys:
1248: + -pc_asm_blocks <blks> - Sets total blocks
1249: . -pc_asm_overlap <ovl> - Sets overlap
1250: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1251: - -pc_asm_local_type [additive, multiplicative] - Sets local solver composition type, default is additive
1253: IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1254: will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1255: -pc_asm_type basic to use the standard ASM.
1257: Notes:
1258: Each processor can have one or more blocks, but a block cannot be shared by more
1259: than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1261: To set options on the solvers for each block append -sub_ to all the KSP, and PC
1262: options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1264: To set the options on the solvers separate for each block call PCASMGetSubKSP()
1265: and set the options directly on the resulting KSP object (you can access its PC
1266: with KSPGetPC())
1268: Level: beginner
1270: References:
1271: + * - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1272: Courant Institute, New York University Technical report
1273: - * - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decomposition: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1274: Cambridge University Press.
1276: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
1277: PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1278: PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1280: M*/
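/*
   Usage sketch (illustrative, not part of asm.c): the most common way to drive PCASM
   is from the options database. With a standard KSPCreate()/KSPSetFromOptions()/
   KSPSolve() code path, the following options select restricted additive Schwarz
   with overlap 2 and an ILU(1) solve on every block:

     -pc_type asm -pc_asm_overlap 2 -pc_asm_type restrict
     -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
*/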
1282: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1283: {
1284: PC_ASM *osm;
1286: PetscNewLog(pc,&osm);
1288: osm->n = PETSC_DECIDE;
1289: osm->n_local = 0;
1290: osm->n_local_true = PETSC_DECIDE;
1291: osm->overlap = 1;
1292: osm->ksp = NULL;
1293: osm->restriction = NULL;
1294: osm->lprolongation = NULL;
1295: osm->lrestriction = NULL;
1296: osm->x = NULL;
1297: osm->y = NULL;
1298: osm->is = NULL;
1299: osm->is_local = NULL;
1300: osm->mat = NULL;
1301: osm->pmat = NULL;
1302: osm->type = PC_ASM_RESTRICT;
1303: osm->loctype = PC_COMPOSITE_ADDITIVE;
1304: osm->sort_indices = PETSC_TRUE;
1305: osm->dm_subdomains = PETSC_FALSE;
1306: osm->sub_mat_type = NULL;
1308: pc->data = (void*)osm;
1309: pc->ops->apply = PCApply_ASM;
1310: pc->ops->matapply = PCMatApply_ASM;
1311: pc->ops->applytranspose = PCApplyTranspose_ASM;
1312: pc->ops->setup = PCSetUp_ASM;
1313: pc->ops->reset = PCReset_ASM;
1314: pc->ops->destroy = PCDestroy_ASM;
1315: pc->ops->setfromoptions = PCSetFromOptions_ASM;
1316: pc->ops->setuponblocks = PCSetUpOnBlocks_ASM;
1317: pc->ops->view = PCView_ASM;
1318: pc->ops->applyrichardson = NULL;
1320: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1321: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1322: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1323: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1324: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);
1325: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);
1326: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);
1327: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1328: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1329: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);
1330: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);
1331: return 0;
1332: }
1334: /*@C
1335: PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1336: preconditioner for any problem on a general grid.
1338: Collective
1340: Input Parameters:
1341: + A - The global matrix operator
1342: - n - the number of local blocks
1344: Output Parameters:
1345: . outis - the array of index sets defining the subdomains
1347: Level: advanced
1349: Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1350: from these if you use PCASMSetLocalSubdomains().
1352: In the Fortran version you must provide the array outis[], already allocated with length n.
1354: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1355: @*/
1356: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1357: {
1358: MatPartitioning mpart;
1359: const char *prefix;
1360: PetscInt i,j,rstart,rend,bs;
1361: PetscBool hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1362: Mat Ad = NULL, adj;
1363: IS ispart,isnumb,*is;
1369: /* Get prefix, row distribution, and block size */
1370: MatGetOptionsPrefix(A,&prefix);
1371: MatGetOwnershipRange(A,&rstart,&rend);
1372: MatGetBlockSize(A,&bs);
1375: /* Get diagonal block from matrix if possible */
1376: MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);
1377: if (hasop) {
1378: MatGetDiagonalBlock(A,&Ad);
1379: }
1380: if (Ad) {
1381: PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1382: if (!isbaij) PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);
1383: }
1384: if (Ad && n > 1) {
1385: PetscBool match,done;
1386: /* Try to setup a good matrix partitioning if available */
1387: MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1388: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1389: MatPartitioningSetFromOptions(mpart);
1390: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1391: if (!match) {
1392: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1393: }
1394: if (!match) { /* assume a "good" partitioner is available */
1395: PetscInt na;
1396: const PetscInt *ia,*ja;
1397: MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1398: if (done) {
1399: /* Build adjacency matrix by hand. Unfortunately a call to
1400: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1401: remove the block-aij structure and we cannot expect
1402: MatPartitioning to split vertices as we need */
1403: PetscInt i,j,len,nnz,cnt,*iia=NULL,*jja=NULL;
1404: const PetscInt *row;
1405: nnz = 0;
1406: for (i=0; i<na; i++) { /* count number of nonzeros */
1407: len = ia[i+1] - ia[i];
1408: row = ja + ia[i];
1409: for (j=0; j<len; j++) {
1410: if (row[j] == i) { /* don't count diagonal */
1411: len--; break;
1412: }
1413: }
1414: nnz += len;
1415: }
1416: PetscMalloc1(na+1,&iia);
1417: PetscMalloc1(nnz,&jja);
1418: nnz = 0;
1419: iia[0] = 0;
1420: for (i=0; i<na; i++) { /* fill adjacency */
1421: cnt = 0;
1422: len = ia[i+1] - ia[i];
1423: row = ja + ia[i];
1424: for (j=0; j<len; j++) {
1425: if (row[j] != i) { /* if not diagonal */
1426: jja[nnz+cnt++] = row[j];
1427: }
1428: }
1429: nnz += cnt;
1430: iia[i+1] = nnz;
1431: }
1432: /* Partitioning of the adjacency matrix */
1433: MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1434: MatPartitioningSetAdjacency(mpart,adj);
1435: MatPartitioningSetNParts(mpart,n);
1436: MatPartitioningApply(mpart,&ispart);
1437: ISPartitioningToNumbering(ispart,&isnumb);
1438: MatDestroy(&adj);
1439: foundpart = PETSC_TRUE;
1440: }
1441: MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1442: }
1443: MatPartitioningDestroy(&mpart);
1444: }
1446: PetscMalloc1(n,&is);
1447: *outis = is;
1449: if (!foundpart) {
1451: /* Partitioning by contiguous chunks of rows */
1453: PetscInt mbs = (rend-rstart)/bs;
1454: PetscInt start = rstart;
1455: for (i=0; i<n; i++) {
1456: PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1457: ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1458: start += count;
1459: }
1461: } else {
1463: /* Partitioning by adjacency of diagonal block */
1465: const PetscInt *numbering;
1466: PetscInt *count,nidx,*indices,*newidx,start=0;
1467: /* Get node count in each partition */
1468: PetscMalloc1(n,&count);
1469: ISPartitioningCount(ispart,n,count);
1470: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1471: for (i=0; i<n; i++) count[i] *= bs;
1472: }
1473: /* Build indices from node numbering */
1474: ISGetLocalSize(isnumb,&nidx);
1475: PetscMalloc1(nidx,&indices);
1476: for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1477: ISGetIndices(isnumb,&numbering);
1478: PetscSortIntWithPermutation(nidx,numbering,indices);
1479: ISRestoreIndices(isnumb,&numbering);
1480: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1481: PetscMalloc1(nidx*bs,&newidx);
1482: for (i=0; i<nidx; i++) {
1483: for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1484: }
1485: PetscFree(indices);
1486: nidx *= bs;
1487: indices = newidx;
1488: }
1489: /* Shift to get global indices */
1490: for (i=0; i<nidx; i++) indices[i] += rstart;
1492: /* Build the index sets for each block */
1493: for (i=0; i<n; i++) {
1494: ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1495: ISSort(is[i]);
1496: start += count[i];
1497: }
1499: PetscFree(count);
1500: PetscFree(indices);
1501: ISDestroy(&isnumb);
1502: ISDestroy(&ispart);
1504: }
1505: return 0;
1506: }
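/*
   Usage sketch (illustrative, not part of asm.c): generating four non-overlapping
   local subdomains from the operator, handing them to PCASM (which then adds the
   overlap), and releasing the index sets. A and pc are placeholders from the
   surrounding application code.

     IS *is;
     PCASMCreateSubdomains(A,4,&is);
     PCASMSetLocalSubdomains(pc,4,is,NULL);
     PCASMDestroySubdomains(4,is,NULL);   // per the docs, destroy after setting the subdomains
*/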
1508: /*@C
1509: PCASMDestroySubdomains - Destroys the index sets created with
1510: PCASMCreateSubdomains(). Should be called after setting subdomains
1511: with PCASMSetLocalSubdomains().
1513: Collective
1515: Input Parameters:
1516: + n - the number of index sets
1517: . is - the array of index sets
1518: - is_local - the array of local index sets, can be NULL
1520: Level: advanced
1522: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1523: @*/
1524: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1525: {
1526: PetscInt i;
1528: if (n <= 0) return 0;
1529: if (is) {
1531: for (i=0; i<n; i++) ISDestroy(&is[i]);
1532: PetscFree(is);
1533: }
1534: if (is_local) {
1536: for (i=0; i<n; i++) ISDestroy(&is_local[i]);
1537: PetscFree(is_local);
1538: }
1539: return 0;
1540: }
1542: /*@
1543: PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1544: preconditioner for a two-dimensional problem on a regular grid.
1546: Not Collective
1548: Input Parameters:
1549: + m - the number of mesh points in the x direction
1550: . n - the number of mesh points in the y direction
1551: . M - the number of subdomains in the x direction
1552: . N - the number of subdomains in the y direction
1553: . dof - degrees of freedom per node
1554: - overlap - overlap in mesh lines
1556: Output Parameters:
1557: + Nsub - the number of subdomains created
1558: . is - array of index sets defining overlapping (if overlap > 0) subdomains
1559: - is_local - array of index sets defining non-overlapping subdomains
1561: Note:
1562: Presently PCASMCreateSubdomains2D() is valid only for sequential
1563: preconditioners. More general related routines are
1564: PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1566: Level: advanced
1568: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1569: PCASMSetOverlap()
1570: @*/
1571: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1572: {
1573: PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1574: PetscInt nidx,*idx,loc,ii,jj,count;
1578: *Nsub = N*M;
1579: PetscMalloc1(*Nsub,is);
1580: PetscMalloc1(*Nsub,is_local);
1581: ystart = 0;
1582: loc_outer = 0;
1583: for (i=0; i<N; i++) {
1584: height = n/N + ((n % N) > i); /* height of subdomain */
1586: yleft = ystart - overlap; if (yleft < 0) yleft = 0;
1587: yright = ystart + height + overlap; if (yright > n) yright = n;
1588: xstart = 0;
1589: for (j=0; j<M; j++) {
1590: width = m/M + ((m % M) > j); /* width of subdomain */
1592: xleft = xstart - overlap; if (xleft < 0) xleft = 0;
1593: xright = xstart + width + overlap; if (xright > m) xright = m;
1594: nidx = (xright - xleft)*(yright - yleft);
1595: PetscMalloc1(nidx,&idx);
1596: loc = 0;
1597: for (ii=yleft; ii<yright; ii++) {
1598: count = m*ii + xleft;
1599: for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1600: }
1601: ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1602: if (overlap == 0) {
1603: PetscObjectReference((PetscObject)(*is)[loc_outer]);
1605: (*is_local)[loc_outer] = (*is)[loc_outer];
1606: } else {
1607: for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1608: for (jj=xstart; jj<xstart+width; jj++) {
1609: idx[loc++] = m*ii + jj;
1610: }
1611: }
1612: ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1613: }
1614: PetscFree(idx);
1615: xstart += width;
1616: loc_outer++;
1617: }
1618: ystart += height;
1619: }
1620: for (i=0; i<*Nsub; i++) ISSort((*is)[i]);
1621: return 0;
1622: }
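/*
   Illustrative sketch (not part of the PETSc source): using PCASMCreateSubdomains2D()
   for a sequential m x n grid split into an M x N array of overlapping blocks. The
   helper name and the grid/decomposition sizes are hypothetical.
*/
static PetscErrorCode ExampleSubdomains2D(PC pc)
{
  PetscInt m = 64,n = 64,M = 4,N = 4,overlap = 1,Nsub;
  IS       *is,*is_local;

  /* build overlapping and non-overlapping index sets for the 4x4 decomposition */
  PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
  PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
  /* the PC references the index sets, so the arrays created above can be freed */
  PCASMDestroySubdomains(Nsub,is,is_local);
  return 0;
}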
1624: /*@C
1625: PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1626: only) for the additive Schwarz preconditioner.
1628: Not Collective
1630: Input Parameter:
1631: . pc - the preconditioner context
1633: Output Parameters:
1634: + n - if requested, the number of subdomains for this processor (default value = 1)
1635: . is - if requested, the index sets that define the subdomains for this processor
1636: - is_local - if requested, the index sets that define the local part of the subdomains for this processor (can be NULL)
1638: Notes:
1639: The IS numbering is in the parallel, global numbering of the vector.
1641: Level: advanced
1643: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1644: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1645: @*/
1646: PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1647: {
1648: PC_ASM *osm = (PC_ASM*)pc->data;
1649: PetscBool match;
1655: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1657: if (n) *n = osm->n_local_true;
1658: if (is) *is = osm->is;
1659: if (is_local) *is_local = osm->is_local;
1660: return 0;
1661: }
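/*
   Illustrative sketch (not part of the PETSc source): querying the subdomains owned
   by this process and printing their sizes. The helper name is hypothetical; the
   returned arrays are borrowed from the PC and must not be freed by the caller.
*/
static PetscErrorCode ExamplePrintLocalSubdomains(PC pc)
{
  PetscInt n,i,len;
  IS       *is;

  PCASMGetLocalSubdomains(pc,&n,&is,NULL);
  for (i=0; i<n; i++) {
    ISGetLocalSize(is[i],&len);
    PetscPrintf(PETSC_COMM_SELF,"subdomain %D has %D indices\n",i,len);
  }
  return 0;
}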
1663: /*@C
1664: PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1665: only) for the additive Schwarz preconditioner.
1667: Not Collective
1669: Input Parameter:
1670: . pc - the preconditioner context
1672: Output Parameters:
1673: + n - if requested, the number of matrices for this processor (default value = 1)
1674: - mat - if requested, the matrices
1676: Level: advanced
1678: Notes:
1679:     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks()
1681:     Usually one would use PCSetModifySubMatrices() to change the submatrices when building the preconditioner.
1683: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1684: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1685: @*/
1686: PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1687: {
1688: PC_ASM *osm;
1689: PetscBool match;
1695: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1696: if (!match) {
1697: if (n) *n = 0;
1698: if (mat) *mat = NULL;
1699: } else {
1700: osm = (PC_ASM*)pc->data;
1701: if (n) *n = osm->n_local_true;
1702: if (mat) *mat = osm->pmat;
1703: }
1704: return 0;
1705: }
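/*
   Illustrative sketch (not part of the PETSc source): inspecting the overlapping
   submatrices after setup, e.g. from a routine called between KSPSetUp() and
   KSPSolve(). The helper name is hypothetical.
*/
static PetscErrorCode ExampleInspectSubmatrices(PC pc)
{
  PetscInt n,i,rows,cols;
  Mat      *mat;

  /* only valid after PCSetUp()/KSPSetUp() has created the submatrices */
  PCASMGetLocalSubmatrices(pc,&n,&mat);
  for (i=0; i<n; i++) {
    MatGetSize(mat[i],&rows,&cols);
    PetscPrintf(PETSC_COMM_SELF,"block %D is %D x %D\n",i,rows,cols);
  }
  return 0;
}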
1707: /*@
1708: PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1710: Logically Collective
1712: Input Parameters:
1713: + pc - the preconditioner
1714: - flg - boolean indicating whether to use subdomains defined by the DM
1716: Options Database Key:
1717: . -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1719: Level: intermediate
1721: Notes:
1722: PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1723: so setting either of the first two effectively turns the latter off.
1725: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1726: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1727: @*/
1728: PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg)
1729: {
1730: PC_ASM *osm = (PC_ASM*)pc->data;
1731: PetscBool match;
1736: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1737: if (match) {
1738: osm->dm_subdomains = flg;
1739: }
1740: return 0;
1741: }
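/*
   Illustrative sketch (not part of the PETSc source): requesting DM-defined
   subdomains before setup, equivalent to the option -pc_asm_dm_subdomains. The
   helper name is hypothetical and assumes the KSP already has a DM attached.
*/
static PetscErrorCode ExampleUseDMSubdomains(KSP ksp)
{
  PC pc;

  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);
  /* must be set before PCSetUp(); ignored if total subdomains or overlap are set explicitly */
  PCASMSetDMSubdomains(pc,PETSC_TRUE);
  return 0;
}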
1743: /*@
1744:    PCASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1745: Not Collective
1747: Input Parameter:
1748: . pc - the preconditioner
1750: Output Parameter:
1751: . flg - boolean indicating whether to use subdomains defined by the DM
1753: Level: intermediate
1755: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1756: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1757: @*/
1758: PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1759: {
1760: PC_ASM *osm = (PC_ASM*)pc->data;
1761: PetscBool match;
1765: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1766: if (match) *flg = osm->dm_subdomains;
1767: else *flg = PETSC_FALSE;
1768: return 0;
1769: }
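/*
   Illustrative sketch (not part of the PETSc source): checking whether DM-defined
   subdomains are enabled. The helper name is hypothetical.
*/
static PetscErrorCode ExampleCheckDMSubdomains(PC pc)
{
  PetscBool flg;

  PCASMGetDMSubdomains(pc,&flg);
  PetscPrintf(PETSC_COMM_SELF,"DM subdomains: %s\n",flg ? "enabled" : "disabled");
  return 0;
}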
1771: /*@
1772: PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1774: Not Collective
1776: Input Parameter:
1777: . pc - the PC
1779: Output Parameter:
1780: .  sub_mat_type - name of matrix type
1782: Level: advanced
1784: .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1785: @*/
1786: PetscErrorCode PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1787: {
1789: PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1790: return 0;
1791: }
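/*
   Illustrative sketch (not part of the PETSc source): reading back the matrix type
   used for the subsolves. The helper name is hypothetical; the type may be NULL if
   it was never set.
*/
static PetscErrorCode ExamplePrintSubMatType(PC pc)
{
  MatType type;

  PCASMGetSubMatType(pc,&type);
  if (type) PetscPrintf(PETSC_COMM_SELF,"ASM sub matrix type: %s\n",type);
  return 0;
}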
1793: /*@
1794: PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1796:    Logically Collective
1798: Input Parameters:
1799: + pc - the PC object
1800: - sub_mat_type - matrix type
1802: Options Database Key:
1803: . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1805: Notes:
1806: See "${PETSC_DIR}/include/petscmat.h" for available types
1808: Level: advanced
1810: .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1811: @*/
1812: PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1813: {
1815: PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
1816: return 0;
1817: }
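/*
   Illustrative sketch (not part of the PETSc source): selecting a different matrix
   type for the subsolves before setup, equivalent to -pc_asm_sub_mat_type. The
   helper name is hypothetical; MATSEQAIJ is used only as an example of a valid MatType.
*/
static PetscErrorCode ExampleSetSubMatType(PC pc)
{
  /* must be called before PCSetUp() so the submatrices are converted when they are built */
  PCASMSetSubMatType(pc,MATSEQAIJ);
  return 0;
}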