Actual source code: asm.c
petsc-3.5.4 2015-05-23
2: /*
3: This file defines an additive Schwarz preconditioner for any Mat implementation.
5: Note that each processor may have any number of subdomains. But in order to
6: deal easily with the VecScatter(), we treat each processor as if it has the
7: same number of subdomains.
9: n - total number of true subdomains on all processors
10: n_local_true - actual number of subdomains on this processor
11: n_local = maximum over all processors of n_local_true
12: */
13: #include <petsc-private/pcimpl.h> /*I "petscpc.h" I*/
14: #include <petscdm.h>
16: typedef struct {
17: PetscInt n, n_local, n_local_true;
18: PetscInt overlap; /* overlap requested by user */
19: KSP *ksp; /* linear solvers for each block */
20: VecScatter *restriction; /* mapping from global to subregion */
21: VecScatter *localization; /* mapping from overlapping to non-overlapping subregion */
22: VecScatter *prolongation; /* mapping from subregion to global */
23: Vec *x,*y,*y_local; /* work vectors */
24: IS *is; /* index set that defines each overlapping subdomain */
25: IS *is_local; /* index set that defines each non-overlapping subdomain, may be NULL */
26: Mat *mat,*pmat; /* mat is not currently used */
27: PCASMType type; /* use reduced interpolation, restriction or both */
28: PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */
29: PetscBool same_local_solves; /* flag indicating whether all local solvers are same */
30: PetscBool sort_indices; /* flag to sort subdomain indices */
31: PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */
32: } PC_ASM;
36: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
37: {
38: PC_ASM *osm = (PC_ASM*)pc->data;
40: PetscMPIInt rank;
41: PetscInt i,bsz;
42: PetscBool iascii,isstring;
43: PetscViewer sviewer;
46: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
47: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
48: if (iascii) {
49: char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
50: if (osm->overlap >= 0) {PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);}
51: if (osm->n > 0) {PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);}
52: PetscViewerASCIIPrintf(viewer," Additive Schwarz: %s, %s\n",blocks,overlaps);
53: PetscViewerASCIIPrintf(viewer," Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
54: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
55: if (osm->same_local_solves) {
56: if (osm->ksp) {
57: PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");
58: PetscViewerGetSingleton(viewer,&sviewer);
59: if (!rank) {
60: PetscViewerASCIIPushTab(viewer);
61: KSPView(osm->ksp[0],sviewer);
62: PetscViewerASCIIPopTab(viewer);
63: }
64: PetscViewerRestoreSingleton(viewer,&sviewer);
65: }
66: } else {
67: PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
68: PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
69: PetscViewerFlush(viewer);
70: PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");
71: PetscViewerASCIIPushTab(viewer);
72: PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
73: PetscViewerGetSingleton(viewer,&sviewer);
74: for (i=0; i<osm->n_local_true; i++) {
75: ISGetLocalSize(osm->is[i],&bsz);
76: PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
77: KSPView(osm->ksp[i],sviewer);
78: PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
79: }
80: PetscViewerRestoreSingleton(viewer,&sviewer);
81: PetscViewerASCIIPopTab(viewer);
82: PetscViewerFlush(viewer);
83: PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
84: }
85: } else if (isstring) {
86: PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
87: PetscViewerGetSingleton(viewer,&sviewer);
88: if (osm->ksp) {KSPView(osm->ksp[0],sviewer);}
89: PetscViewerRestoreSingleton(viewer,&sviewer);
90: }
91: return(0);
92: }
96: static PetscErrorCode PCASMPrintSubdomains(PC pc)
97: {
98: PC_ASM *osm = (PC_ASM*)pc->data;
99: const char *prefix;
100: char fname[PETSC_MAX_PATH_LEN+1];
101: PetscViewer viewer, sviewer;
102: char *s;
103: PetscInt i,j,nidx;
104: const PetscInt *idx;
105: PetscMPIInt rank, size;
109: MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
110: MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
111: PCGetOptionsPrefix(pc,&prefix);
112: PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);
113: if (fname[0] == 0) { PetscStrcpy(fname,"stdout"); };
114: PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
115: for (i=0; i<osm->n_local; i++) {
116: if (i < osm->n_local_true) {
117: ISGetLocalSize(osm->is[i],&nidx);
118: ISGetIndices(osm->is[i],&idx);
119: /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
120: PetscMalloc(sizeof(char)*(16*(nidx+1)+512), &s);
121: PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
122: PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);
123: for (j=0; j<nidx; j++) {
124: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
125: }
126: ISRestoreIndices(osm->is[i],&idx);
127: PetscViewerStringSPrintf(sviewer,"\n");
128: PetscViewerDestroy(&sviewer);
129: PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
130: PetscViewerASCIISynchronizedPrintf(viewer, s);
131: PetscViewerFlush(viewer);
132: PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
133: PetscFree(s);
134: if (osm->is_local) {
135: /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
136: PetscMalloc(sizeof(char)*(16*(nidx+1)+512), &s);
137: PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
138: PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);
139: ISGetLocalSize(osm->is_local[i],&nidx);
140: ISGetIndices(osm->is_local[i],&idx);
141: for (j=0; j<nidx; j++) {
142: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
143: }
144: ISRestoreIndices(osm->is_local[i],&idx);
145: PetscViewerStringSPrintf(sviewer,"\n");
146: PetscViewerDestroy(&sviewer);
147: PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
148: PetscViewerASCIISynchronizedPrintf(viewer, s);
149: PetscViewerFlush(viewer);
150: PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
151: PetscFree(s);
152: }
153: } else {
154: /* Participate in collective viewer calls. */
155: PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
156: PetscViewerFlush(viewer);
157: PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
158: /* Assume either all ranks have is_local or none do. */
159: if (osm->is_local) {
160: PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
161: PetscViewerFlush(viewer);
162: PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
163: }
164: }
165: }
166: PetscViewerFlush(viewer);
167: PetscViewerDestroy(&viewer);
168: return(0);
169: }
173: static PetscErrorCode PCSetUp_ASM(PC pc)
174: {
175: PC_ASM *osm = (PC_ASM*)pc->data;
177: PetscBool symset,flg;
178: PetscInt i,m,m_local;
179: MatReuse scall = MAT_REUSE_MATRIX;
180: IS isl;
181: KSP ksp;
182: PC subpc;
183: const char *prefix,*pprefix;
184: Vec vec;
185: DM *domain_dm = NULL;
188: if (!pc->setupcalled) {
190: if (!osm->type_set) {
191: MatIsSymmetricKnown(pc->pmat,&symset,&flg);
192: if (symset && flg) osm->type = PC_ASM_BASIC;
193: }
195: /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
196: if (osm->n_local_true == PETSC_DECIDE) {
197: /* no subdomains given */
198: /* try pc->dm first, if allowed */
199: if (osm->dm_subdomains && pc->dm) {
200: PetscInt num_domains, d;
201: char **domain_names;
202: IS *inner_domain_is, *outer_domain_is;
203: DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);
204: if (num_domains) {
205: PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
206: }
207: for (d = 0; d < num_domains; ++d) {
208: if (domain_names) {PetscFree(domain_names[d]);}
209: if (inner_domain_is) {ISDestroy(&inner_domain_is[d]);}
210: if (outer_domain_is) {ISDestroy(&outer_domain_is[d]);}
211: }
212: PetscFree(domain_names);
213: PetscFree(inner_domain_is);
214: PetscFree(outer_domain_is);
215: }
216: if (osm->n_local_true == PETSC_DECIDE) {
217: /* still no subdomains; use one subdomain per processor */
218: osm->n_local_true = 1;
219: }
220: }
221: { /* determine the global and max number of subdomains */
222: struct {PetscInt max,sum;} inwork,outwork;
223: inwork.max = osm->n_local_true;
224: inwork.sum = osm->n_local_true;
225: MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));
226: osm->n_local = outwork.max;
227: osm->n = outwork.sum;
228: }
229: if (!osm->is) { /* create the index sets */
230: PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
231: }
232: if (osm->n_local_true > 1 && !osm->is_local) {
233: PetscMalloc1(osm->n_local_true,&osm->is_local);
234: for (i=0; i<osm->n_local_true; i++) {
235: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
236: ISDuplicate(osm->is[i],&osm->is_local[i]);
237: ISCopy(osm->is[i],osm->is_local[i]);
238: } else {
239: PetscObjectReference((PetscObject)osm->is[i]);
240: osm->is_local[i] = osm->is[i];
241: }
242: }
243: }
244: PCGetOptionsPrefix(pc,&prefix);
245: flg = PETSC_FALSE;
246: PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,NULL);
247: if (flg) { PCASMPrintSubdomains(pc); }
249: if (osm->overlap > 0) {
250: /* Extend the "overlapping" regions by a number of steps */
251: MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
252: }
253: if (osm->sort_indices) {
254: for (i=0; i<osm->n_local_true; i++) {
255: ISSort(osm->is[i]);
256: if (osm->is_local) {
257: ISSort(osm->is_local[i]);
258: }
259: }
260: }
261: /* Create the local work vectors and scatter contexts */
262: MatGetVecs(pc->pmat,&vec,0);
263: PetscMalloc1(osm->n_local,&osm->restriction);
264: if (osm->is_local) {PetscMalloc1(osm->n_local,&osm->localization);}
265: PetscMalloc1(osm->n_local,&osm->prolongation);
266: PetscMalloc1(osm->n_local,&osm->x);
267: PetscMalloc1(osm->n_local,&osm->y);
268: PetscMalloc1(osm->n_local,&osm->y_local);
269: for (i=0; i<osm->n_local_true; ++i) {
270: ISGetLocalSize(osm->is[i],&m);
271: VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);
272: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
273: VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);
274: ISDestroy(&isl);
275: VecDuplicate(osm->x[i],&osm->y[i]);
276: if (osm->is_local) {
277: ISLocalToGlobalMapping ltog;
278: IS isll;
279: const PetscInt *idx_local;
280: PetscInt *idx,nout;
282: ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
283: ISGetLocalSize(osm->is_local[i],&m_local);
284: ISGetIndices(osm->is_local[i], &idx_local);
285: PetscMalloc1(m_local,&idx);
286: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);
287: ISLocalToGlobalMappingDestroy(&ltog);
288: if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
289: ISRestoreIndices(osm->is_local[i], &idx_local);
290: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);
291: ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);
292: VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);
293: VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);
294: ISDestroy(&isll);
296: VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);
297: ISDestroy(&isl);
298: } else {
299: VecGetLocalSize(vec,&m_local);
301: osm->y_local[i] = osm->y[i];
303: PetscObjectReference((PetscObject) osm->y[i]);
305: osm->prolongation[i] = osm->restriction[i];
307: PetscObjectReference((PetscObject) osm->restriction[i]);
308: }
309: }
310: for (i=osm->n_local_true; i<osm->n_local; i++) {
311: VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);
312: VecDuplicate(osm->x[i],&osm->y[i]);
313: VecDuplicate(osm->x[i],&osm->y_local[i]);
314: ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);
315: VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);
316: if (osm->is_local) {
317: VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);
318: VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);
319: } else {
320: osm->prolongation[i] = osm->restriction[i];
321: PetscObjectReference((PetscObject) osm->restriction[i]);
322: }
323: ISDestroy(&isl);
324: }
325: VecDestroy(&vec);
327: if (!osm->ksp) {
328: /* Create the local solvers */
329: PetscMalloc1(osm->n_local_true,&osm->ksp);
330: if (domain_dm) {
331: PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
332: }
333: for (i=0; i<osm->n_local_true; i++) {
334: KSPCreate(PETSC_COMM_SELF,&ksp);
335: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
336: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
337: KSPSetType(ksp,KSPPREONLY);
338: KSPGetPC(ksp,&subpc);
339: PCGetOptionsPrefix(pc,&prefix);
340: KSPSetOptionsPrefix(ksp,prefix);
341: KSPAppendOptionsPrefix(ksp,"sub_");
342: if (domain_dm) {
343: KSPSetDM(ksp, domain_dm[i]);
344: KSPSetDMActive(ksp, PETSC_FALSE);
345: DMDestroy(&domain_dm[i]);
346: }
347: osm->ksp[i] = ksp;
348: }
349: if (domain_dm) {
350: PetscFree(domain_dm);
351: }
352: }
353: scall = MAT_INITIAL_MATRIX;
354: } else {
355: /*
356: Destroy the blocks from the previous iteration
357: */
358: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
359: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
360: scall = MAT_INITIAL_MATRIX;
361: }
362: }
364: /*
365: Extract out the submatrices
366: */
367: MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
368: if (scall == MAT_INITIAL_MATRIX) {
369: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
370: for (i=0; i<osm->n_local_true; i++) {
371: PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
372: PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
373: }
374: }
376: /* Return control to the user so that the submatrices can be modified (e.g., to apply
377: different boundary conditions for the submatrices than for the global problem) */
378: PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);
380: /*
381: Loop over subdomains putting them into local ksp
382: */
383: for (i=0; i<osm->n_local_true; i++) {
384: KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
385: if (!pc->setupcalled) {
386: KSPSetFromOptions(osm->ksp[i]);
387: }
388: }
389: return(0);
390: }
394: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
395: {
396: PC_ASM *osm = (PC_ASM*)pc->data;
398: PetscInt i;
401: for (i=0; i<osm->n_local_true; i++) {
402: KSPSetUp(osm->ksp[i]);
403: }
404: return(0);
405: }
409: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
410: {
411: PC_ASM *osm = (PC_ASM*)pc->data;
413: PetscInt i,n_local = osm->n_local,n_local_true = osm->n_local_true;
414: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
417: /*
418: Support for limiting the restriction or interpolation to only local
419: subdomain values (leaving the other values 0).
420: */
421: if (!(osm->type & PC_ASM_RESTRICT)) {
422: forward = SCATTER_FORWARD_LOCAL;
423: /* have to zero the work RHS since scatter may leave some slots empty */
424: for (i=0; i<n_local_true; i++) {
425: VecZeroEntries(osm->x[i]);
426: }
427: }
428: if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;
430: for (i=0; i<n_local; i++) {
431: VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
432: }
433: VecZeroEntries(y);
434: /* do the local solves */
435: for (i=0; i<n_local_true; i++) {
436: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
437: KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);
438: if (osm->localization) {
439: VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
440: VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
441: }
442: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
443: }
444: /* handle the rest of the scatters that do not have local solves */
445: for (i=n_local_true; i<n_local; i++) {
446: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
447: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
448: }
449: for (i=0; i<n_local; i++) {
450: VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
451: }
452: return(0);
453: }
457: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
458: {
459: PC_ASM *osm = (PC_ASM*)pc->data;
461: PetscInt i,n_local = osm->n_local,n_local_true = osm->n_local_true;
462: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
465: /*
466: Support for limiting the restriction or interpolation to only local
467: subdomain values (leaving the other values 0).
469: Note: these are reversed from the PCApply_ASM() because we are applying the
470: transpose of the three terms
471: */
472: if (!(osm->type & PC_ASM_INTERPOLATE)) {
473: forward = SCATTER_FORWARD_LOCAL;
474: /* have to zero the work RHS since scatter may leave some slots empty */
475: for (i=0; i<n_local_true; i++) {
476: VecZeroEntries(osm->x[i]);
477: }
478: }
479: if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
481: for (i=0; i<n_local; i++) {
482: VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
483: }
484: VecZeroEntries(y);
485: /* do the local solves */
486: for (i=0; i<n_local_true; i++) {
487: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
488: KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);
489: if (osm->localization) {
490: VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
491: VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
492: }
493: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
494: }
495: /* handle the rest of the scatters that do not have local solves */
496: for (i=n_local_true; i<n_local; i++) {
497: VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
498: VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
499: }
500: for (i=0; i<n_local; i++) {
501: VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
502: }
503: return(0);
504: }
508: static PetscErrorCode PCReset_ASM(PC pc)
509: {
510: PC_ASM *osm = (PC_ASM*)pc->data;
512: PetscInt i;
515: if (osm->ksp) {
516: for (i=0; i<osm->n_local_true; i++) {
517: KSPReset(osm->ksp[i]);
518: }
519: }
520: if (osm->pmat) {
521: if (osm->n_local_true > 0) {
522: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
523: }
524: }
525: if (osm->restriction) {
526: for (i=0; i<osm->n_local; i++) {
527: VecScatterDestroy(&osm->restriction[i]);
528: if (osm->localization) {VecScatterDestroy(&osm->localization[i]);}
529: VecScatterDestroy(&osm->prolongation[i]);
530: VecDestroy(&osm->x[i]);
531: VecDestroy(&osm->y[i]);
532: VecDestroy(&osm->y_local[i]);
533: }
534: PetscFree(osm->restriction);
535: if (osm->localization) {PetscFree(osm->localization);}
536: PetscFree(osm->prolongation);
537: PetscFree(osm->x);
538: PetscFree(osm->y);
539: PetscFree(osm->y_local);
540: }
541: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
543: osm->is = 0;
544: osm->is_local = 0;
545: return(0);
546: }
550: static PetscErrorCode PCDestroy_ASM(PC pc)
551: {
552: PC_ASM *osm = (PC_ASM*)pc->data;
554: PetscInt i;
557: PCReset_ASM(pc);
558: if (osm->ksp) {
559: for (i=0; i<osm->n_local_true; i++) {
560: KSPDestroy(&osm->ksp[i]);
561: }
562: PetscFree(osm->ksp);
563: }
564: PetscFree(pc->data);
565: return(0);
566: }
570: static PetscErrorCode PCSetFromOptions_ASM(PC pc)
571: {
572: PC_ASM *osm = (PC_ASM*)pc->data;
574: PetscInt blocks,ovl;
575: PetscBool symset,flg;
576: PCASMType asmtype;
579: /* set the type to symmetric if matrix is symmetric */
580: if (!osm->type_set && pc->pmat) {
581: MatIsSymmetricKnown(pc->pmat,&symset,&flg);
582: if (symset && flg) osm->type = PC_ASM_BASIC;
583: }
584: PetscOptionsHead("Additive Schwarz options");
585: PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
586: PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
587: if (flg) {
588: PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
589: osm->dm_subdomains = PETSC_FALSE;
590: }
591: PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
592: if (flg) {
593: PCASMSetOverlap(pc,ovl);
594: osm->dm_subdomains = PETSC_FALSE;
595: }
596: flg = PETSC_FALSE;
597: PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
598: if (flg) {PCASMSetType(pc,asmtype); }
599: PetscOptionsTail();
600: return(0);
601: }
603: /*------------------------------------------------------------------------------------*/
607: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
608: {
609: PC_ASM *osm = (PC_ASM*)pc->data;
611: PetscInt i;
614: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
615: if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
617: if (!pc->setupcalled) {
618: if (is) {
619: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is[i]);}
620: }
621: if (is_local) {
622: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is_local[i]);}
623: }
624: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
626: osm->n_local_true = n;
627: osm->is = 0;
628: osm->is_local = 0;
629: if (is) {
630: PetscMalloc1(n,&osm->is);
631: for (i=0; i<n; i++) osm->is[i] = is[i];
632: /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
633: osm->overlap = -1;
634: }
635: if (is_local) {
636: PetscMalloc1(n,&osm->is_local);
637: for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
638: if (!is) {
639: PetscMalloc1(osm->n_local_true,&osm->is);
640: for (i=0; i<osm->n_local_true; i++) {
641: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
642: ISDuplicate(osm->is_local[i],&osm->is[i]);
643: ISCopy(osm->is_local[i],osm->is[i]);
644: } else {
645: PetscObjectReference((PetscObject)osm->is_local[i]);
646: osm->is[i] = osm->is_local[i];
647: }
648: }
649: }
650: }
651: }
652: return(0);
653: }
657: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
658: {
659: PC_ASM *osm = (PC_ASM*)pc->data;
661: PetscMPIInt rank,size;
662: PetscInt n;
665: if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
666: if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\nThey cannot be set globally yet.");
668: /*
669: Split the subdomains equally among all processors
670: */
671: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
672: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
673: n = N/size + ((N % size) > rank);
674: if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
675: if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
676: if (!pc->setupcalled) {
677: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
679: osm->n_local_true = n;
680: osm->is = 0;
681: osm->is_local = 0;
682: }
683: return(0);
684: }
688: static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
689: {
690: PC_ASM *osm = (PC_ASM*)pc->data;
693: if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
694: if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
695: if (!pc->setupcalled) osm->overlap = ovl;
696: return(0);
697: }
701: static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
702: {
703: PC_ASM *osm = (PC_ASM*)pc->data;
706: osm->type = type;
707: osm->type_set = PETSC_TRUE;
708: return(0);
709: }
713: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
714: {
715: PC_ASM *osm = (PC_ASM*)pc->data;
718: osm->sort_indices = doSort;
719: return(0);
720: }
724: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
725: {
726: PC_ASM *osm = (PC_ASM*)pc->data;
730: if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
732: if (n_local) *n_local = osm->n_local_true;
733: if (first_local) {
734: MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
735: *first_local -= osm->n_local_true;
736: }
737: if (ksp) {
738: /* Assume that local solves are now different; not necessarily
739: true though! This flag is used only for PCView_ASM() */
740: *ksp = osm->ksp;
741: osm->same_local_solves = PETSC_FALSE;
742: }
743: return(0);
744: }
748: /*@C
749: PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
751: Collective on PC
753: Input Parameters:
754: + pc - the preconditioner context
755: . n - the number of subdomains for this processor (default value = 1)
756: . is - the index set that defines the subdomains for this processor
757: (or NULL for PETSc to determine subdomains)
758: - is_local - the index sets that define the local part of the subdomains for this processor
759: (or NULL to use the default of 1 subdomain per process)
761: Notes:
762: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
764: By default the ASM preconditioner uses 1 block per processor.
766: Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
768: Level: advanced
770: .keywords: PC, ASM, set, local, subdomains, additive Schwarz
772: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
773: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
774: @*/
775: PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
776: {
781: PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
782: return(0);
783: }
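A minimal usage sketch (not part of the source above): assuming pc has already been set to PCASM and A is the assembled parallel matrix, the fragment below supplies one subdomain per process built from that process's own rows and then asks PETSc to grow it by one layer of overlap at setup time; the names A, pc, rstart, and rend are illustrative only.

   PetscInt rstart,rend;
   IS       is[1],is_local[1];
   MatGetOwnershipRange(A,&rstart,&rend);
   ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&is[0]);        /* overlapping subdomain (grown at setup) */
   ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&is_local[0]);  /* non-overlapping part, kept as given */
   PCASMSetLocalSubdomains(pc,1,is,is_local);
   PCASMSetOverlap(pc,1);          /* let MatIncreaseOverlap() add one layer around is[0] */
   ISDestroy(&is[0]);              /* PCASM holds its own references */
   ISDestroy(&is_local[0]);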
787: /*@C
788: PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
789: additive Schwarz preconditioner. Either all or no processors in the
790: PC communicator must call this routine, with the same index sets.
792: Collective on PC
794: Input Parameters:
795: + pc - the preconditioner context
796: . N - the number of subdomains for all processors
797: . is - the index sets that define the subdomains for all processors
798: (or NULL to ask PETSc to come up with subdomains)
799: - is_local - the index sets that define the local part of the subdomains for this processor
800: (or NULL to use the default of 1 subdomain per process)
802: Options Database Key:
803: To set the total number of subdomain blocks rather than specify the
804: index sets, use the option
805: . -pc_asm_blocks <blks> - Sets total blocks
807: Notes:
808: Currently you cannot use this to set the actual subdomains with the argument is.
810: By default the ASM preconditioner uses 1 block per processor.
812: These index sets cannot be destroyed until after completion of the
813: linear solves for which the ASM preconditioner is being used.
815: Use PCASMSetLocalSubdomains() to set local subdomains.
817: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
819: Level: advanced
821: .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
823: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
824: PCASMCreateSubdomains2D()
825: @*/
826: PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
827: {
832: PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
833: return(0);
834: }
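A minimal sketch of the typical call, assuming pc is the PC obtained from the outer KSP; passing NULL for both index-set arguments is currently the only supported mode, so PETSc builds the blocks itself.

   PCSetType(pc,PCASM);
   PCASMSetTotalSubdomains(pc,16,NULL,NULL);   /* same effect as -pc_asm_blocks 16 */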
838: /*@
839: PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
840: additive Schwarz preconditioner. Either all or no processors in the
841: PC communicator must call this routine.
843: Logically Collective on PC
845: Input Parameters:
846: + pc - the preconditioner context
847: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
849: Options Database Key:
850: . -pc_asm_overlap <ovl> - Sets overlap
852: Notes:
853: By default the ASM preconditioner uses 1 block per processor. To use
854: multiple blocks per processor, see PCASMSetTotalSubdomains() and
855: PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
857: The overlap defaults to 1, so if one desires that no additional
858: overlap be computed beyond what may have been set with a call to
859: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
860: must be set to 0. In particular, if one does not explicitly set
861: the subdomains in an application code, then all overlap is computed
862: internally by PETSc, and using an overlap of 0 results in an ASM
863: variant that is equivalent to the block Jacobi preconditioner.
865: Note that one can define initial index sets with any overlap via
866: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
867: PCASMSetOverlap() merely allows PETSc to extend that overlap further
868: if desired.
870: Level: intermediate
872: .keywords: PC, ASM, set, overlap
874: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
875: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
876: @*/
877: PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
878: {
884: PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
885: return(0);
886: }
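As a hedged illustration, assuming pc is a PC drawn from the outer KSP and no subdomains have been supplied explicitly: with the default of one block per process, an overlap of 0 makes ASM behave like block Jacobi on those blocks, while a larger value grows each block at setup time.

   PCSetType(pc,PCASM);
   PCASMSetOverlap(pc,2);          /* same effect as -pc_asm_overlap 2; use 0 for the block Jacobi-like variant */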
890: /*@
891: PCASMSetType - Sets the type of restriction and interpolation used
892: for local problems in the additive Schwarz method.
894: Logically Collective on PC
896: Input Parameters:
897: + pc - the preconditioner context
898: - type - variant of ASM, one of
899: .vb
900: PC_ASM_BASIC - full interpolation and restriction
901: PC_ASM_RESTRICT - full restriction, local processor interpolation
902: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
903: PC_ASM_NONE - local processor restriction and interpolation
904: .ve
906: Options Database Key:
907: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
909: Level: intermediate
911: .keywords: PC, ASM, set, type
913: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
914: PCASMCreateSubdomains2D()
915: @*/
916: PetscErrorCode PCASMSetType(PC pc,PCASMType type)
917: {
923: PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
924: return(0);
925: }
929: /*@
930: PCASMSetSortIndices - Determines whether subdomain indices are sorted.
932: Logically Collective on PC
934: Input Parameters:
935: + pc - the preconditioner context
936: - doSort - sort the subdomain indices
938: Level: intermediate
940: .keywords: PC, ASM, set, type
942: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
943: PCASMCreateSubdomains2D()
944: @*/
945: PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort)
946: {
952: PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
953: return(0);
954: }
958: /*@C
959: PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
960: this processor.
962: Collective on PC iff first_local is requested
964: Input Parameter:
965: . pc - the preconditioner context
967: Output Parameters:
968: + n_local - the number of blocks on this processor or NULL
969: . first_local - the global number of the first block on this processor or NULL,
970: all processors must request or all must pass NULL
971: - ksp - the array of KSP contexts
973: Note:
974: After PCASMGetSubKSP() the array of KSP contexts must not be freed.
976: Currently for some matrix implementations only 1 block per processor
977: is supported.
979: You must call KSPSetUp() before calling PCASMGetSubKSP().
981: Fortran note:
982: The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.
984: Level: advanced
986: .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
988: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
989: PCASMCreateSubdomains2D(),
990: @*/
991: PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
992: {
997: PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
998: return(0);
999: }
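A minimal sketch of customizing the block solvers, assuming ksp is the fully configured outer solver; the variable names are illustrative. Note that KSPSetUp() on the outer solver must precede PCASMGetSubKSP(), and that every rank requests first_local so the call stays collective.

   KSP      *subksp;
   PC       pc,subpc;
   PetscInt i,nlocal,first;
   KSPGetPC(ksp,&pc);
   KSPSetUp(ksp);                                 /* must be called before PCASMGetSubKSP() */
   PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
   for (i=0; i<nlocal; i++) {
     KSPGetPC(subksp[i],&subpc);
     PCSetType(subpc,PCILU);                      /* e.g. incomplete LU on every local block */
     KSPSetType(subksp[i],KSPGMRES);              /* replace the default preonly block solve */
     KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
   }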
1001: /* -------------------------------------------------------------------------------------*/
1002: /*MC
1003: PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1004: its own KSP object.
1006: Options Database Keys:
1007: + -pc_asm_blocks <blks> - Sets total blocks
1008: . -pc_asm_overlap <ovl> - Sets overlap
1009: - -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1011: IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1012: will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1013: -pc_asm_type basic to use the standard ASM.
1015: Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1016: than one processor. Defaults to one block per processor.
1018: To set options on the solvers for each block, append -sub_ to all the KSP and PC
1019: options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1021: To set the options on the solvers separately for each block, call PCASMGetSubKSP()
1022: and set the options directly on the resulting KSP object (you can access its PC
1023: with KSPGetPC()).
1026: Level: beginner
1028: Concepts: additive Schwarz method
1030: References:
1031: An additive variant of the Schwarz alternating method for the case of many subregions
1032: M Dryja, OB Widlund - Courant Institute, New York University Technical report
1034: Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1035: Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1037: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
1038: PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1039: PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1041: M*/
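A minimal end-to-end sketch of the usual way PCASM is driven, assuming A, b, and x are an assembled parallel matrix and vectors; error checking is omitted as in the listing above. The subdomain solvers are then controlled from the command line, e.g. -pc_asm_overlap 2 -pc_asm_type basic -sub_pc_type ilu -sub_ksp_type preonly.

   KSP ksp;
   PC  pc;
   KSPCreate(PETSC_COMM_WORLD,&ksp);
   KSPSetOperators(ksp,A,A);
   KSPGetPC(ksp,&pc);
   PCSetType(pc,PCASM);
   KSPSetFromOptions(ksp);         /* picks up -pc_asm_* and -sub_* options */
   KSPSolve(ksp,b,x);
   KSPDestroy(&ksp);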
1045: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1046: {
1048: PC_ASM *osm;
1051: PetscNewLog(pc,&osm);
1053: osm->n = PETSC_DECIDE;
1054: osm->n_local = 0;
1055: osm->n_local_true = PETSC_DECIDE;
1056: osm->overlap = 1;
1057: osm->ksp = 0;
1058: osm->restriction = 0;
1059: osm->localization = 0;
1060: osm->prolongation = 0;
1061: osm->x = 0;
1062: osm->y = 0;
1063: osm->y_local = 0;
1064: osm->is = 0;
1065: osm->is_local = 0;
1066: osm->mat = 0;
1067: osm->pmat = 0;
1068: osm->type = PC_ASM_RESTRICT;
1069: osm->same_local_solves = PETSC_TRUE;
1070: osm->sort_indices = PETSC_TRUE;
1071: osm->dm_subdomains = PETSC_FALSE;
1073: pc->data = (void*)osm;
1074: pc->ops->apply = PCApply_ASM;
1075: pc->ops->applytranspose = PCApplyTranspose_ASM;
1076: pc->ops->setup = PCSetUp_ASM;
1077: pc->ops->reset = PCReset_ASM;
1078: pc->ops->destroy = PCDestroy_ASM;
1079: pc->ops->setfromoptions = PCSetFromOptions_ASM;
1080: pc->ops->setuponblocks = PCSetUpOnBlocks_ASM;
1081: pc->ops->view = PCView_ASM;
1082: pc->ops->applyrichardson = 0;
1084: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1085: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1086: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1087: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1088: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1089: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1090: return(0);
1091: }
1095: /*@C
1096: PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1097: preconditioner for any problem on a general grid.
1099: Collective
1101: Input Parameters:
1102: + A - The global matrix operator
1103: - n - the number of local blocks
1105: Output Parameters:
1106: . outis - the array of index sets defining the subdomains
1108: Level: advanced
1110: Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1111: from these if you use PCASMSetLocalSubdomains()
1113: In the Fortran version you must provide the array outis[], already allocated with length n.
1115: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1117: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1118: @*/
1119: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1120: {
1121: MatPartitioning mpart;
1122: const char *prefix;
1123: PetscErrorCode (*f)(Mat,Mat*);
1124: PetscMPIInt size;
1125: PetscInt i,j,rstart,rend,bs;
1126: PetscBool isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1127: Mat Ad = NULL, adj;
1128: IS ispart,isnumb,*is;
1129: PetscErrorCode ierr;
1134: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1136: /* Get prefix, row distribution, and block size */
1137: MatGetOptionsPrefix(A,&prefix);
1138: MatGetOwnershipRange(A,&rstart,&rend);
1139: MatGetBlockSize(A,&bs);
1140: if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1142: /* Get diagonal block from matrix if possible */
1143: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1144: PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);
1145: if (f) {
1146: MatGetDiagonalBlock(A,&Ad);
1147: } else if (size == 1) {
1148: Ad = A;
1149: }
1150: if (Ad) {
1151: PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1152: if (!isbaij) {PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1153: }
1154: if (Ad && n > 1) {
1155: PetscBool match,done;
1156: /* Try to setup a good matrix partitioning if available */
1157: MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1158: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1159: MatPartitioningSetFromOptions(mpart);
1160: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1161: if (!match) {
1162: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1163: }
1164: if (!match) { /* assume a "good" partitioner is available */
1165: PetscInt na;
1166: const PetscInt *ia,*ja;
1167: MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1168: if (done) {
1169: /* Build adjacency matrix by hand. Unfortunately a call to
1170: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1171: remove the block-aij structure and we cannot expect
1172: MatPartitioning to split vertices as we need */
1173: PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1174: const PetscInt *row;
1175: nnz = 0;
1176: for (i=0; i<na; i++) { /* count number of nonzeros */
1177: len = ia[i+1] - ia[i];
1178: row = ja + ia[i];
1179: for (j=0; j<len; j++) {
1180: if (row[j] == i) { /* don't count diagonal */
1181: len--; break;
1182: }
1183: }
1184: nnz += len;
1185: }
1186: PetscMalloc1((na+1),&iia);
1187: PetscMalloc1((nnz),&jja);
1188: nnz = 0;
1189: iia[0] = 0;
1190: for (i=0; i<na; i++) { /* fill adjacency */
1191: cnt = 0;
1192: len = ia[i+1] - ia[i];
1193: row = ja + ia[i];
1194: for (j=0; j<len; j++) {
1195: if (row[j] != i) { /* if not diagonal */
1196: jja[nnz+cnt++] = row[j];
1197: }
1198: }
1199: nnz += cnt;
1200: iia[i+1] = nnz;
1201: }
1202: /* Partitioning of the adjacency matrix */
1203: MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1204: MatPartitioningSetAdjacency(mpart,adj);
1205: MatPartitioningSetNParts(mpart,n);
1206: MatPartitioningApply(mpart,&ispart);
1207: ISPartitioningToNumbering(ispart,&isnumb);
1208: MatDestroy(&adj);
1209: foundpart = PETSC_TRUE;
1210: }
1211: MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1212: }
1213: MatPartitioningDestroy(&mpart);
1214: }
1216: PetscMalloc1(n,&is);
1217: *outis = is;
1219: if (!foundpart) {
1221: /* Partitioning by contiguous chunks of rows */
1223: PetscInt mbs = (rend-rstart)/bs;
1224: PetscInt start = rstart;
1225: for (i=0; i<n; i++) {
1226: PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1227: ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1228: start += count;
1229: }
1231: } else {
1233: /* Partitioning by adjacency of diagonal block */
1235: const PetscInt *numbering;
1236: PetscInt *count,nidx,*indices,*newidx,start=0;
1237: /* Get node count in each partition */
1238: PetscMalloc1(n,&count);
1239: ISPartitioningCount(ispart,n,count);
1240: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1241: for (i=0; i<n; i++) count[i] *= bs;
1242: }
1243: /* Build indices from node numbering */
1244: ISGetLocalSize(isnumb,&nidx);
1245: PetscMalloc1(nidx,&indices);
1246: for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1247: ISGetIndices(isnumb,&numbering);
1248: PetscSortIntWithPermutation(nidx,numbering,indices);
1249: ISRestoreIndices(isnumb,&numbering);
1250: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1251: PetscMalloc1(nidx*bs,&newidx);
1252: for (i=0; i<nidx; i++) {
1253: for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1254: }
1255: PetscFree(indices);
1256: nidx *= bs;
1257: indices = newidx;
1258: }
1259: /* Shift to get global indices */
1260: for (i=0; i<nidx; i++) indices[i] += rstart;
1262: /* Build the index sets for each block */
1263: for (i=0; i<n; i++) {
1264: ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1265: ISSort(is[i]);
1266: start += count[i];
1267: }
1269: PetscFree(count);
1270: PetscFree(indices);
1271: ISDestroy(&isnumb);
1272: ISDestroy(&ispart);
1274: }
1275: return(0);
1276: }
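A hedged sketch of combining this routine with PCASMSetLocalSubdomains(), assuming A is the assembled matrix and pc a PC of type PCASM; nloc and the other names are illustrative.

   IS       *is;
   PetscInt nloc = 4;
   PCASMCreateSubdomains(A,nloc,&is);        /* nonoverlapping blocks on this process */
   PCASMSetLocalSubdomains(pc,nloc,is,NULL);
   PCASMSetOverlap(pc,1);                    /* grow each block by one layer at setup */
   PCASMDestroySubdomains(nloc,is,NULL);     /* PCASM keeps its own references */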
1280: /*@C
1281: PCASMDestroySubdomains - Destroys the index sets created with
1282: PCASMCreateSubdomains(). Should be called after setting subdomains
1283: with PCASMSetLocalSubdomains().
1285: Collective
1287: Input Parameters:
1288: + n - the number of index sets
1289: . is - the array of index sets
1290: - is_local - the array of local index sets, can be NULL
1292: Level: advanced
1294: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1296: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1297: @*/
1298: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1299: {
1300: PetscInt i;
1304: if (n <= 0) return(0);
1305: if (is) {
1307: for (i=0; i<n; i++) { ISDestroy(&is[i]); }
1308: PetscFree(is);
1309: }
1310: if (is_local) {
1312: for (i=0; i<n; i++) { ISDestroy(&is_local[i]); }
1313: PetscFree(is_local);
1314: }
1315: return(0);
1316: }
1320: /*@
1321: PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1322: preconditioner for a two-dimensional problem on a regular grid.
1324: Not Collective
1326: Input Parameters:
1327: + m, n - the number of mesh points in the x and y directions
1328: . M, N - the number of subdomains in the x and y directions
1329: . dof - degrees of freedom per node
1330: - overlap - overlap in mesh lines
1332: Output Parameters:
1333: + Nsub - the number of subdomains created
1334: . is - array of index sets defining overlapping (if overlap > 0) subdomains
1335: - is_local - array of index sets defining non-overlapping subdomains
1337: Note:
1338: Presently PCASMCreateSubdomains2D() is valid only for sequential
1339: preconditioners. More general related routines are
1340: PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1342: Level: advanced
1344: .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1346: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1347: PCASMSetOverlap()
1348: @*/
1349: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1350: {
1351: PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1353: PetscInt nidx,*idx,loc,ii,jj,count;
1356: if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," ");
1358: *Nsub = N*M;
1359: PetscMalloc1((*Nsub),is);
1360: PetscMalloc1((*Nsub),is_local);
1361: ystart = 0;
1362: loc_outer = 0;
1363: for (i=0; i<N; i++) {
1364: height = n/N + ((n % N) > i); /* height of subdomain */
1365: if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1366: yleft = ystart - overlap; if (yleft < 0) yleft = 0;
1367: yright = ystart + height + overlap; if (yright > n) yright = n;
1368: xstart = 0;
1369: for (j=0; j<M; j++) {
1370: width = m/M + ((m % M) > j); /* width of subdomain */
1371: if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1372: xleft = xstart - overlap; if (xleft < 0) xleft = 0;
1373: xright = xstart + width + overlap; if (xright > m) xright = m;
1374: nidx = (xright - xleft)*(yright - yleft);
1375: PetscMalloc1(nidx,&idx);
1376: loc = 0;
1377: for (ii=yleft; ii<yright; ii++) {
1378: count = m*ii + xleft;
1379: for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1380: }
1381: ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1382: if (overlap == 0) {
1383: PetscObjectReference((PetscObject)(*is)[loc_outer]);
1385: (*is_local)[loc_outer] = (*is)[loc_outer];
1386: } else {
1387: for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1388: for (jj=xstart; jj<xstart+width; jj++) {
1389: idx[loc++] = m*ii + jj;
1390: }
1391: }
1392: ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1393: }
1394: PetscFree(idx);
1395: xstart += width;
1396: loc_outer++;
1397: }
1398: ystart += height;
1399: }
1400: for (i=0; i<*Nsub; i++) { ISSort((*is)[i]); }
1401: return(0);
1402: }
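A minimal sketch for the sequential case only, assuming pc is a PC of type PCASM on PETSC_COMM_SELF; the grid and subdomain counts are illustrative.

   IS       *is,*is_local;
   PetscInt Nsub;
   PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);   /* 64x64 grid, 4x4 blocks, overlap 1 */
   PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
   PCASMSetOverlap(pc,0);                    /* index sets above already carry the overlap */
   PCASMDestroySubdomains(Nsub,is,is_local);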
1406: /*@C
1407: PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1408: only) for the additive Schwarz preconditioner.
1410: Not Collective
1412: Input Parameter:
1413: . pc - the preconditioner context
1415: Output Parameters:
1416: + n - the number of subdomains for this processor (default value = 1)
1417: . is - the index sets that define the subdomains for this processor
1418: - is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1421: Notes:
1422: The IS numbering is in the parallel, global numbering of the vector.
1424: Level: advanced
1426: .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1428: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1429: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1430: @*/
1431: PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1432: {
1433: PC_ASM *osm;
1435: PetscBool match;
1441: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1442: if (!match) {
1443: if (n) *n = 0;
1444: if (is) *is = NULL;
1445: } else {
1446: osm = (PC_ASM*)pc->data;
1447: if (n) *n = osm->n_local_true;
1448: if (is) *is = osm->is;
1449: if (is_local) *is_local = osm->is_local;
1450: }
1451: return(0);
1452: }
1456: /*@C
1457: PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1458: only) for the additive Schwarz preconditioner.
1460: Not Collective
1462: Input Parameter:
1463: . pc - the preconditioner context
1465: Output Parameters:
1466: + n - the number of matrices for this processor (default value = 1)
1467: - mat - the matrices
1470: Level: advanced
1472: Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks().
1474: Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1476: .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1478: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1479: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1480: @*/
1481: PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1482: {
1483: PC_ASM *osm;
1485: PetscBool match;
1491: if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1492: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1493: if (!match) {
1494: if (n) *n = 0;
1495: if (mat) *mat = NULL;
1496: } else {
1497: osm = (PC_ASM*)pc->data;
1498: if (n) *n = osm->n_local_true;
1499: if (mat) *mat = osm->pmat;
1500: }
1501: return(0);
1502: }
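A minimal sketch of inspecting the per-block matrices once they exist, assuming pc is a PC of type PCASM attached to a KSP whose operators have already been set; for actually modifying the submatrices during setup, PCSetModifySubmatrices() remains the intended hook.

   Mat      *submat;
   PetscInt i,nloc,mrows,ncols;
   PCSetUp(pc);                              /* submatrices exist only after setup */
   PCASMGetLocalSubmatrices(pc,&nloc,&submat);
   for (i=0; i<nloc; i++) {
     MatGetSize(submat[i],&mrows,&ncols);
     PetscPrintf(PETSC_COMM_SELF,"block %D is %D x %D\n",i,mrows,ncols);
   }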
1506: /*@
1507: PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1508: Logically Collective
1510: Input Parameter:
1511: + pc - the preconditioner
1512: - flg - boolean indicating whether to use subdomains defined by the DM
1514: Options Database Key:
1515: . -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM, when possible
1517: Level: intermediate
1519: Notes:
1520: PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1521: so setting either of the first two effectively turns the latter off.
1523: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1525: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1526: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1527: @*/
1528: PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg)
1529: {
1530: PC_ASM *osm = (PC_ASM*)pc->data;
1532: PetscBool match;
1537: if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1538: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1539: if (match) {
1540: osm->dm_subdomains = flg;
1541: }
1542: return(0);
1543: }
1547: /*@
1548: PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1549: Not Collective
1551: Input Parameter:
1552: . pc - the preconditioner
1554: Output Parameter:
1555: . flg - boolean indicating whether to use subdomains defined by the DM
1557: Level: intermediate
1559: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1561: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1562: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1563: @*/
1564: PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1565: {
1566: PC_ASM *osm = (PC_ASM*)pc->data;
1568: PetscBool match;
1573: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1574: if (match) {
1575: if (flg) *flg = osm->dm_subdomains;
1576: }
1577: return(0);
1578: }