Actual source code: gasm.c
petsc-3.10.5 2019-03-28
1: /*
2: This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3: In this version each processor may intersect multiple subdomains and any subdomain may
4: intersect multiple processors. Intersections of subdomains with processors are called *local
5: subdomains*.
7: N - total number of distinct global subdomains (set explicitly in PCGASMSetTotalSubdomains() or implicitly via PCGASMSetSubdomains() and then calculated in PCSetUp_GASM())
8: n - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9: nmax - maximum number of local subdomains per processor (calculated in PCSetUp_GASM())
10: */
11: #include <petsc/private/pcimpl.h>
12: #include <petscdm.h>
14: typedef struct {
15: PetscInt N,n,nmax;
16: PetscInt overlap; /* overlap requested by user */
17: PCGASMType type; /* use reduced interpolation, restriction or both */
18: PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */
19: PetscBool same_subdomain_solvers; /* flag indicating whether all local solvers are same */
20: PetscBool sort_indices; /* flag to sort subdomain indices */
21: PetscBool user_subdomains; /* whether the user set explicit subdomain index sets -- keep them on PCReset() */
22: PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */
23: PetscBool hierarchicalpartitioning;
24: IS *ois; /* index sets that define the outer (conceptually, overlapping) subdomains */
25: IS *iis; /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
26: KSP *ksp; /* linear solvers for each subdomain */
27: Mat *pmat; /* subdomain block matrices */
28: Vec gx,gy; /* Merged work vectors */
29: Vec *x,*y; /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
30: VecScatter gorestriction; /* merged restriction to disjoint union of outer subdomains */
31: VecScatter girestriction; /* merged restriction to disjoint union of inner subdomains */
32: VecScatter pctoouter;
33: IS permutationIS;
34: Mat permutationP;
35: Mat pcmat;
36: Vec pcx,pcy;
37: } PC_GASM;
39: static PetscErrorCode PCGASMComputeGlobalSubdomainNumbering_Private(PC pc,PetscInt **numbering,PetscInt **permutation)
40: {
41: PC_GASM *osm = (PC_GASM*)pc->data;
42: PetscInt i;
46: /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */
47: PetscMalloc2(osm->n,numbering,osm->n,permutation);
48: PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc),osm->n,(PetscObject*)osm->iis,NULL,*numbering);
49: for (i = 0; i < osm->n; ++i) (*permutation)[i] = i;
50: PetscSortIntWithPermutation(osm->n,*numbering,*permutation);
51: return(0);
52: }
54: static PetscErrorCode PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
55: {
56: PC_GASM *osm = (PC_GASM*)pc->data;
57: PetscInt j,nidx;
58: const PetscInt *idx;
59: PetscViewer sviewer;
60: char *cidx;
64: if (i < 0 || i > osm->n) SETERRQ2(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must be nonnegative and less than %D", i, osm->n);
65: /* Inner subdomains. */
66: ISGetLocalSize(osm->iis[i], &nidx);
67: /*
68: No more than 15 characters per index plus a space.
69: PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
70: in case nidx == 0. That will take care of the space for the trailing '\0' as well.
71: For nidx == 0, the whole string consists of 16 '\0' characters.
72: */
73: PetscMalloc1(16*(nidx+1)+1, &cidx);
74: PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);
75: ISGetIndices(osm->iis[i], &idx);
76: for (j = 0; j < nidx; ++j) {
77: PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);
78: }
79: ISRestoreIndices(osm->iis[i],&idx);
80: PetscViewerDestroy(&sviewer);
81: PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");
82: PetscViewerFlush(viewer);
83: PetscViewerASCIIPushSynchronized(viewer);
84: PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);
85: PetscViewerFlush(viewer);
86: PetscViewerASCIIPopSynchronized(viewer);
87: PetscViewerASCIIPrintf(viewer, "\n");
88: PetscViewerFlush(viewer);
89: PetscFree(cidx);
90: /* Outer subdomains. */
91: ISGetLocalSize(osm->ois[i], &nidx);
92: /*
93: No more than 15 characters per index plus a space.
94: PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
95: in case nidx == 0. That will take care of the space for the trailing '\0' as well.
96: For nidx == 0, the whole string consists of 16 '\0' characters.
97: */
98: PetscMalloc1(16*(nidx+1)+1, &cidx);
99: PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);
100: ISGetIndices(osm->ois[i], &idx);
101: for (j = 0; j < nidx; ++j) {
102: PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);
103: }
104: PetscViewerDestroy(&sviewer);
105: ISRestoreIndices(osm->ois[i],&idx);
106: PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");
107: PetscViewerFlush(viewer);
108: PetscViewerASCIIPushSynchronized(viewer);
109: PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);
110: PetscViewerFlush(viewer);
111: PetscViewerASCIIPopSynchronized(viewer);
112: PetscViewerASCIIPrintf(viewer, "\n");
113: PetscViewerFlush(viewer);
114: PetscFree(cidx);
115: return(0);
116: }
118: static PetscErrorCode PCGASMPrintSubdomains(PC pc)
119: {
120: PC_GASM *osm = (PC_GASM*)pc->data;
121: const char *prefix;
122: char fname[PETSC_MAX_PATH_LEN+1];
123: PetscInt l, d, count;
124: PetscBool doprint,found;
125: PetscViewer viewer, sviewer = NULL;
126: PetscInt *numbering,*permutation;/* global numbering of locally-supported subdomains and the permutation from the local ordering */
130: PCGetOptionsPrefix(pc,&prefix);
131: doprint = PETSC_FALSE;
132: PetscOptionsGetBool(NULL,prefix,"-pc_gasm_print_subdomains",&doprint,NULL);
133: if (!doprint) return(0);
134: PetscOptionsGetString(NULL,prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);
135: if (!found) { PetscStrcpy(fname,"stdout"); };
136: PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
137: /*
138: Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
139: the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
140: */
141: PetscObjectName((PetscObject)viewer);
142: l = 0;
143: PCGASMComputeGlobalSubdomainNumbering_Private(pc,&numbering,&permutation);
144: for (count = 0; count < osm->N; ++count) {
145: /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
146: if (l<osm->n) {
147: d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
148: if (numbering[d] == count) {
149: PetscViewerGetSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
150: PCGASMSubdomainView_Private(pc,d,sviewer);
151: PetscViewerRestoreSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
152: ++l;
153: }
154: }
155: MPI_Barrier(PetscObjectComm((PetscObject)pc));
156: }
157: PetscFree2(numbering,permutation);
158: PetscViewerDestroy(&viewer);
159: return(0);
160: }
163: static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
164: {
165: PC_GASM *osm = (PC_GASM*)pc->data;
166: const char *prefix;
168: PetscMPIInt rank, size;
169: PetscInt bsz;
170: PetscBool iascii,view_subdomains=PETSC_FALSE;
171: PetscViewer sviewer;
172: PetscInt count, l;
173: char overlap[256] = "user-defined overlap";
174: char gsubdomains[256] = "unknown total number of subdomains";
175: char msubdomains[256] = "unknown max number of local subdomains";
176: PetscInt *numbering,*permutation;/* global numbering of locally-supported subdomains and the permutation from the local ordering */
179: MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
180: MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
182: if (osm->overlap >= 0) {
183: PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);
184: }
185: if (osm->N != PETSC_DETERMINE) {
186: PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",osm->N);
187: }
188: if (osm->nmax != PETSC_DETERMINE) {
189: PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);
190: }
192: PCGetOptionsPrefix(pc,&prefix);
193: PetscOptionsGetBool(NULL,prefix,"-pc_gasm_view_subdomains",&view_subdomains,NULL);
195: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
196: if (iascii) {
197: /*
198: Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
199: the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
200: collectively on the comm.
201: */
202: PetscObjectName((PetscObject)viewer);
203: PetscViewerASCIIPrintf(viewer," Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);
204: PetscViewerASCIIPrintf(viewer," %s\n",overlap);
205: PetscViewerASCIIPrintf(viewer," %s\n",gsubdomains);
206: PetscViewerASCIIPrintf(viewer," %s\n",msubdomains);
207: PetscViewerASCIIPushSynchronized(viewer);
208: PetscViewerASCIISynchronizedPrintf(viewer," [%d|%d] number of locally-supported subdomains = %D\n",rank,size,osm->n);
209: PetscViewerFlush(viewer);
210: PetscViewerASCIIPopSynchronized(viewer);
211: /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
212: PetscViewerASCIIPrintf(viewer," Subdomain solver info is as follows:\n");
213: PetscViewerASCIIPushTab(viewer);
214: PetscViewerASCIIPrintf(viewer," - - - - - - - - - - - - - - - - - -\n");
215: /* Make sure that everybody waits for the banner to be printed. */
216: MPI_Barrier(PetscObjectComm((PetscObject)viewer));
217: /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
218: PCGASMComputeGlobalSubdomainNumbering_Private(pc,&numbering,&permutation);
219: l = 0;
220: for (count = 0; count < osm->N; ++count) {
221: PetscMPIInt srank, ssize;
222: if (l<osm->n) {
223: PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
224: if (numbering[d] == count) {
225: MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);
226: MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);
227: PetscViewerGetSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
228: ISGetLocalSize(osm->ois[d],&bsz);
229: PetscViewerASCIISynchronizedPrintf(sviewer," [%d|%d] (subcomm [%d|%d]) local subdomain number %D, local size = %D\n",rank,size,srank,ssize,d,bsz);
230: PetscViewerFlush(sviewer);
231: if (view_subdomains) {
232: PCGASMSubdomainView_Private(pc,d,sviewer);
233: }
234: if (!pc->setupcalled) {
235: PetscViewerASCIIPrintf(sviewer, " Solver not set up yet: PCSetUp() not yet called\n");
236: } else {
237: KSPView(osm->ksp[d],sviewer);
238: }
239: PetscViewerASCIIPrintf(sviewer," - - - - - - - - - - - - - - - - - -\n");
240: PetscViewerFlush(sviewer);
241: PetscViewerRestoreSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
242: ++l;
243: }
244: }
245: MPI_Barrier(PetscObjectComm((PetscObject)pc));
246: }
247: PetscFree2(numbering,permutation);
248: PetscViewerASCIIPopTab(viewer);
249: }
250: PetscViewerFlush(viewer);
251: PetscViewerASCIIPopSynchronized(viewer);
252: return(0);
253: }
255: PETSC_INTERN PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[]);
259: PetscErrorCode PCGASMSetHierarchicalPartitioning(PC pc)
260: {
261: PC_GASM *osm = (PC_GASM*)pc->data;
262: MatPartitioning part;
263: MPI_Comm comm;
264: PetscMPIInt size;
265: PetscInt nlocalsubdomains,fromrows_localsize;
266: IS partitioning,fromrows,isn;
267: Vec outervec;
268: PetscErrorCode ierr;
271: PetscObjectGetComm((PetscObject)pc,&comm);
272: MPI_Comm_size(comm,&size);
273: /* We do not need hierarchical partitioning when the total number of
274:  * subdomains is consistent with the number of MPI tasks; in the
275:  * following cases HP is not used.
276:  */
278: if(osm->N==PETSC_DETERMINE || osm->N>=size || osm->N==1) return(0);
279: if (size%osm->N != 0) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"The total number of subdomains %D must be a factor of the number of processors %d",osm->N,size);
280: nlocalsubdomains = size/osm->N;
281: osm->n = 1;
282: MatPartitioningCreate(comm,&part);
283: MatPartitioningSetAdjacency(part,pc->pmat);
284: MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);
285: MatPartitioningHierarchicalSetNcoarseparts(part,osm->N);
286: MatPartitioningHierarchicalSetNfineparts(part,nlocalsubdomains);
287: MatPartitioningSetFromOptions(part);
288: /* get new processor owner number of each vertex */
289: MatPartitioningApply(part,&partitioning);
290: ISBuildTwoSided(partitioning,NULL,&fromrows);
291: ISPartitioningToNumbering(partitioning,&isn);
292: ISDestroy(&isn);
293: ISGetLocalSize(fromrows,&fromrows_localsize);
294: MatPartitioningDestroy(&part);
295: MatCreateVecs(pc->pmat,&outervec,NULL);
296: VecCreateMPI(comm,fromrows_localsize,PETSC_DETERMINE,&(osm->pcx));
297: VecDuplicate(osm->pcx,&(osm->pcy));
298: VecScatterCreate(osm->pcx,NULL,outervec,fromrows,&(osm->pctoouter));
299: MatCreateSubMatrix(pc->pmat,fromrows,fromrows,MAT_INITIAL_MATRIX,&(osm->permutationP));
300: PetscObjectReference((PetscObject)fromrows);
301: osm->permutationIS = fromrows;
302: osm->pcmat = pc->pmat;
303: PetscObjectReference((PetscObject)osm->permutationP);
304: pc->pmat = osm->permutationP;
305: VecDestroy(&outervec);
306: ISDestroy(&fromrows);
307: ISDestroy(&partitioning);
308: osm->n = PETSC_DETERMINE;
309: return(0);
310: }
314: static PetscErrorCode PCSetUp_GASM(PC pc)
315: {
316: PC_GASM *osm = (PC_GASM*)pc->data;
318: PetscBool symset,flg;
319: PetscInt i,nInnerIndices,nTotalInnerIndices;
320: PetscMPIInt rank, size;
321: MatReuse scall = MAT_REUSE_MATRIX;
322: KSP ksp;
323: PC subpc;
324: const char *prefix,*pprefix;
325: Vec x,y;
326: PetscInt oni; /* Number of indices in the i-th local outer subdomain. */
327: const PetscInt *oidxi; /* Indices of the i-th local outer subdomain. */
328: PetscInt on; /* Number of indices in the disjoint union of local outer subdomains. */
329: PetscInt *oidx; /* Indices in the disjoint union of local outer subdomains. */
330: IS gois; /* Disjoint union of the global indices of the outer subdomains. */
331: IS goid; /* Identity IS of the size of the disjoint union of outer subdomains. */
332: PetscScalar *gxarray, *gyarray;
333: PetscInt gostart; /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
334: PetscInt num_subdomains = 0;
335: DM *subdomain_dm = NULL;
336: char **subdomain_names = NULL;
337: PetscInt *numbering;
341: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
342: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
343: if (!pc->setupcalled) {
344: /* use a hierarchical partitioning */
345: if(osm->hierarchicalpartitioning){
346: PCGASMSetHierarchicalPartitioning(pc);
347: }
348: if (!osm->type_set) {
349: MatIsSymmetricKnown(pc->pmat,&symset,&flg);
350: if (symset && flg) osm->type = PC_GASM_BASIC;
351: }
353: if (osm->n == PETSC_DETERMINE) {
354: if (osm->N != PETSC_DETERMINE) {
355: /* No local subdomains given, but the desired number of total subdomains is known, so construct them accordingly. */
356: PCGASMCreateSubdomains(pc->pmat,osm->N,&osm->n,&osm->iis);
357: } else if (osm->dm_subdomains && pc->dm) {
358: /* try pc->dm next, if allowed */
359: PetscInt d;
360: IS *inner_subdomain_is, *outer_subdomain_is;
361: DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);
362: if (num_subdomains) {
363: PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);
364: }
365: for (d = 0; d < num_subdomains; ++d) {
366: if (inner_subdomain_is) {ISDestroy(&inner_subdomain_is[d]);}
367: if (outer_subdomain_is) {ISDestroy(&outer_subdomain_is[d]);}
368: }
369: PetscFree(inner_subdomain_is);
370: PetscFree(outer_subdomain_is);
371: } else {
372: /* still no subdomains; use one per processor */
373: osm->nmax = osm->n = 1;
374: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
375: osm->N = size;
376: PCGASMCreateLocalSubdomains(pc->pmat,osm->n,&osm->iis);
377: }
378: }
379: if (!osm->iis) {
380: /*
381: osm->n was set in PCGASMSetSubdomains(), but the actual subdomains have not been supplied.
382: We create the requisite number of local inner subdomains and then expand them into
383: outer subdomains, if necessary.
384: */
385: PCGASMCreateLocalSubdomains(pc->pmat,osm->n,&osm->iis);
386: }
387: if (!osm->ois) {
388: /*
389: Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
390: has been requested, copy the inner subdomains over so they can be modified.
391: */
392: PetscMalloc1(osm->n,&osm->ois);
393: for (i=0; i<osm->n; ++i) {
394: if (osm->overlap > 0 && osm->N>1) { /* With positive overlap, osm->iis[i] will be modified */
395: ISDuplicate(osm->iis[i],(osm->ois)+i);
396: ISCopy(osm->iis[i],osm->ois[i]);
397: } else {
398: PetscObjectReference((PetscObject)((osm->iis)[i]));
399: osm->ois[i] = osm->iis[i];
400: }
401: }
402: if (osm->overlap>0 && osm->N>1) {
403: /* Extend the "overlapping" regions by a number of steps */
404: MatIncreaseOverlapSplit(pc->pmat,osm->n,osm->ois,osm->overlap);
405: }
406: }
408: /* Now the subdomains are defined. Determine their global and max local numbers, if necessary. */
409: if (osm->nmax == PETSC_DETERMINE) {
410: PetscMPIInt inwork,outwork;
411: /* determine global number of subdomains and the max number of local subdomains */
412: inwork = osm->n;
413: MPIU_Allreduce(&inwork,&outwork,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));
414: osm->nmax = outwork;
415: }
416: if (osm->N == PETSC_DETERMINE) {
417: /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */
418: PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc),osm->n,(PetscObject*)osm->ois,&osm->N,NULL);
419: }
422: if (osm->sort_indices) {
423: for (i=0; i<osm->n; i++) {
424: ISSort(osm->ois[i]);
425: ISSort(osm->iis[i]);
426: }
427: }
428: PCGetOptionsPrefix(pc,&prefix);
429: PCGASMPrintSubdomains(pc);
431: /*
432: Merge the ISs, create merged vectors and restrictions.
433: */
434: /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
435: on = 0;
436: for (i=0; i<osm->n; i++) {
437: ISGetLocalSize(osm->ois[i],&oni);
438: on += oni;
439: }
440: PetscMalloc1(on, &oidx);
441: on = 0;
442: /* Merge local indices together */
443: for (i=0; i<osm->n; i++) {
444: ISGetLocalSize(osm->ois[i],&oni);
445: ISGetIndices(osm->ois[i],&oidxi);
446: PetscMemcpy(oidx+on,oidxi,sizeof(PetscInt)*oni);
447: ISRestoreIndices(osm->ois[i],&oidxi);
448: on += oni;
449: }
450: ISCreateGeneral(((PetscObject)(pc))->comm,on,oidx,PETSC_OWN_POINTER,&gois);
451: nTotalInnerIndices = 0;
452: for(i=0; i<osm->n; i++){
453: ISGetLocalSize(osm->iis[i],&nInnerIndices);
454: nTotalInnerIndices += nInnerIndices;
455: }
456: VecCreateMPI(((PetscObject)(pc))->comm,nTotalInnerIndices,PETSC_DETERMINE,&x);
457: VecDuplicate(x,&y);
459: VecCreateMPI(PetscObjectComm((PetscObject)pc),on,PETSC_DECIDE,&osm->gx);
460: VecDuplicate(osm->gx,&osm->gy);
461: VecGetOwnershipRange(osm->gx, &gostart, NULL);
462: ISCreateStride(PetscObjectComm((PetscObject)pc),on,gostart,1, &goid);
463: /* gois might contain indices that are not locally owned */
464: VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));
465: PetscMalloc1(osm->n,&numbering);
466: PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc),osm->n,(PetscObject*)osm->ois,NULL,numbering);
467: VecDestroy(&x);
468: ISDestroy(&gois);
470: /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
471: {
472: PetscInt ini; /* Number of indices in the i-th local inner subdomain. */
473: PetscInt in; /* Number of indices in the disjoint union of local inner subdomains. */
474: PetscInt *iidx; /* Global indices in the merged local inner subdomain. */
475: PetscInt *ioidx; /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
476: IS giis; /* IS for the disjoint union of inner subdomains. */
477: IS giois; /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
478: PetscScalar *array;
479: const PetscInt *indices;
480: PetscInt k;
481: on = 0;
482: for (i=0; i<osm->n; i++) {
483: ISGetLocalSize(osm->ois[i],&oni);
484: on += oni;
485: }
486: PetscMalloc1(on, &iidx);
487: PetscMalloc1(on, &ioidx);
488: VecGetArray(y,&array);
489: /* mark each entry with its subdomain's global number so overlap can be detected */
490: in = 0;
491: for (i=0; i<osm->n; i++) {
492: ISGetLocalSize(osm->iis[i],&ini);
493: for (k = 0; k < ini; ++k){
494: array[in+k] = numbering[i];
495: }
496: in += ini;
497: }
498: VecRestoreArray(y,&array);
499: VecScatterBegin(osm->gorestriction,y,osm->gy,INSERT_VALUES,SCATTER_FORWARD);
500: VecScatterEnd(osm->gorestriction,y,osm->gy,INSERT_VALUES,SCATTER_FORWARD);
501: VecGetOwnershipRange(osm->gy,&gostart, NULL);
502: VecGetArray(osm->gy,&array);
503: on = 0;
504: in = 0;
505: for (i=0; i<osm->n; i++) {
506: ISGetLocalSize(osm->ois[i],&oni);
507: ISGetIndices(osm->ois[i],&indices);
508: for (k=0; k<oni; k++) {
509: /* skip overlapping indices to get inner domain */
510: if(PetscRealPart(array[on+k]) != numbering[i]) continue;
511: iidx[in] = indices[k];
512: ioidx[in++] = gostart+on+k;
513: }
514: ISRestoreIndices(osm->ois[i], &indices);
515: on += oni;
516: }
517: VecRestoreArray(osm->gy,&array);
518: ISCreateGeneral(PetscObjectComm((PetscObject)pc),in,iidx,PETSC_OWN_POINTER,&giis);
519: ISCreateGeneral(PetscObjectComm((PetscObject)pc),in,ioidx,PETSC_OWN_POINTER,&giois);
520: VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);
521: VecDestroy(&y);
522: ISDestroy(&giis);
523: ISDestroy(&giois);
524: }
525: ISDestroy(&goid);
526: PetscFree(numbering);
528: /* Create the subdomain work vectors. */
529: PetscMalloc1(osm->n,&osm->x);
530: PetscMalloc1(osm->n,&osm->y);
531: VecGetArray(osm->gx, &gxarray);
532: VecGetArray(osm->gy, &gyarray);
533: for (i=0, on=0; i<osm->n; ++i, on += oni) {
534: PetscInt oNi;
535: ISGetLocalSize(osm->ois[i],&oni);
536: /* on a sub communicator */
537: ISGetSize(osm->ois[i],&oNi);
538: VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);
539: VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);
540: }
541: VecRestoreArray(osm->gx, &gxarray);
542: VecRestoreArray(osm->gy, &gyarray);
543: /* Create the subdomain solvers */
544: PetscMalloc1(osm->n,&osm->ksp);
545: for (i=0; i<osm->n; i++) {
546: char subprefix[PETSC_MAX_PATH_LEN+1];
547: KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);
548: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
549: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
550: KSPSetType(ksp,KSPPREONLY);
551: KSPGetPC(ksp,&subpc); /* Why do we need this here? */
552: if (subdomain_dm) {
553: KSPSetDM(ksp,subdomain_dm[i]);
554: DMDestroy(subdomain_dm+i);
555: }
556: PCGetOptionsPrefix(pc,&prefix);
557: KSPSetOptionsPrefix(ksp,prefix);
558: if (subdomain_names && subdomain_names[i]) {
559: PetscSNPrintf(subprefix,PETSC_MAX_PATH_LEN,"sub_%s_",subdomain_names[i]);
560: KSPAppendOptionsPrefix(ksp,subprefix);
561: PetscFree(subdomain_names[i]);
562: }
563: KSPAppendOptionsPrefix(ksp,"sub_");
564: osm->ksp[i] = ksp;
565: }
566: PetscFree(subdomain_dm);
567: PetscFree(subdomain_names);
568: scall = MAT_INITIAL_MATRIX;
570: } else { /* if (pc->setupcalled) */
571: /*
572: Destroy the submatrices from the previous iteration
573: */
574: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
575: MatDestroyMatrices(osm->n,&osm->pmat);
576: scall = MAT_INITIAL_MATRIX;
577: }
578: if(osm->permutationIS){
579: MatCreateSubMatrix(pc->pmat,osm->permutationIS,osm->permutationIS,scall,&osm->permutationP);
580: PetscObjectReference((PetscObject)osm->permutationP);
581: osm->pcmat = pc->pmat;
582: pc->pmat = osm->permutationP;
583: }
585: }
588: /*
589: Extract out the submatrices.
590: */
591: if (size > 1) {
592: MatCreateSubMatricesMPI(pc->pmat,osm->n,osm->ois,osm->ois,scall,&osm->pmat);
593: } else {
594: MatCreateSubMatrices(pc->pmat,osm->n,osm->ois,osm->ois,scall,&osm->pmat);
595: }
596: if (scall == MAT_INITIAL_MATRIX) {
597: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
598: for (i=0; i<osm->n; i++) {
599: PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
600: PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
601: }
602: }
604: /* Return control to the user so that the submatrices can be modified (e.g., to apply
605: different boundary conditions for the submatrices than for the global problem) */
606: PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);
608: /*
609: Loop over submatrices putting them into local ksps
610: */
611: for (i=0; i<osm->n; i++) {
612: KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
613: if (!pc->setupcalled) {
614: KSPSetFromOptions(osm->ksp[i]);
615: }
616: }
617: if(osm->pcmat){
618: MatDestroy(&pc->pmat);
619: pc->pmat = osm->pcmat;
620: osm->pcmat = 0;
621: }
622: return(0);
623: }
625: static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
626: {
627: PC_GASM *osm = (PC_GASM*)pc->data;
629: PetscInt i;
632: for (i=0; i<osm->n; i++) {
633: KSPSetUp(osm->ksp[i]);
634: }
635: return(0);
636: }
638: static PetscErrorCode PCApply_GASM(PC pc,Vec xin,Vec yout)
639: {
640: PC_GASM *osm = (PC_GASM*)pc->data;
642: PetscInt i;
643: Vec x,y;
644: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
647: if(osm->pctoouter){
648: VecScatterBegin(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);
649: VecScatterEnd(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);
650: x = osm->pcx;
651: y = osm->pcy;
652: }else{
653: x = xin;
654: y = yout;
655: }
656: /*
657: Support for limiting the restriction or interpolation only to the inner
658: subdomain values (leaving the other values 0).
659: */
660: if (!(osm->type & PC_GASM_RESTRICT)) {
661: /* have to zero the work RHS since scatter may leave some slots empty */
662: VecZeroEntries(osm->gx);
663: VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
664: } else {
665: VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
666: }
667: VecZeroEntries(osm->gy);
668: if (!(osm->type & PC_GASM_RESTRICT)) {
669: VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
670: } else {
671: VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
672: }
673: /* do the subdomain solves */
674: for (i=0; i<osm->n; ++i) {
675: KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);
676: }
677: /* Do we need to zero y ?? */
678: VecZeroEntries(y);
679: if (!(osm->type & PC_GASM_INTERPOLATE)) {
680: VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
681: VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
682: } else {
683: VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
684: VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
685: }
686: if(osm->pctoouter){
687: VecScatterBegin(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);
688: VecScatterEnd(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);
689: }
690: return(0);
691: }
693: static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec xin,Vec yout)
694: {
695: PC_GASM *osm = (PC_GASM*)pc->data;
697: PetscInt i;
698: Vec x,y;
699: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
702: if(osm->pctoouter){
703: VecScatterBegin(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);
704: VecScatterEnd(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);
705: x = osm->pcx;
706: y = osm->pcy;
707: }else{
708: x = xin;
709: y = yout;
710: }
711: /*
712: Support for limiting the restriction or interpolation to only local
713: subdomain values (leaving the other values 0).
715: Note: these are reversed from the PCApply_GASM() because we are applying the
716: transpose of the three terms
717: */
718: if (!(osm->type & PC_GASM_INTERPOLATE)) {
719: /* have to zero the work RHS since scatter may leave some slots empty */
720: VecZeroEntries(osm->gx);
721: VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
722: } else {
723: VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
724: }
725: VecZeroEntries(osm->gy);
726: if (!(osm->type & PC_GASM_INTERPOLATE)) {
727: VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
728: } else {
729: VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
730: }
731: /* do the local solves */
732: for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
733: KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);
734: }
735: VecZeroEntries(y);
736: if (!(osm->type & PC_GASM_RESTRICT)) {
737: VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
738: VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
739: } else {
740: VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
741: VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
742: }
743: if(osm->pctoouter){
744: VecScatterBegin(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);
745: VecScatterEnd(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);
746: }
747: return(0);
748: }
750: static PetscErrorCode PCReset_GASM(PC pc)
751: {
752: PC_GASM *osm = (PC_GASM*)pc->data;
754: PetscInt i;
757: if (osm->ksp) {
758: for (i=0; i<osm->n; i++) {
759: KSPReset(osm->ksp[i]);
760: }
761: }
762: if (osm->pmat) {
763: if (osm->n > 0) {
764: PetscMPIInt size;
765: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
766: if (size > 1) {
767: /* osm->pmat is created by MatCreateSubMatricesMPI(), cannot use MatDestroySubMatrices() */
768: MatDestroyMatrices(osm->n,&osm->pmat);
769: } else {
770: MatDestroySubMatrices(osm->n,&osm->pmat);
771: }
772: }
773: }
774: if (osm->x) {
775: for (i=0; i<osm->n; i++) {
776: VecDestroy(&osm->x[i]);
777: VecDestroy(&osm->y[i]);
778: }
779: }
780: VecDestroy(&osm->gx);
781: VecDestroy(&osm->gy);
783: VecScatterDestroy(&osm->gorestriction);
784: VecScatterDestroy(&osm->girestriction);
785: if (!osm->user_subdomains) {
786: PCGASMDestroySubdomains(osm->n,&osm->ois,&osm->iis);
787: osm->N = PETSC_DETERMINE;
788: osm->nmax = PETSC_DETERMINE;
789: }
790: if(osm->pctoouter){
791: VecScatterDestroy(&(osm->pctoouter));
792: }
793: if(osm->permutationIS){
794: ISDestroy(&(osm->permutationIS));
795: }
796: if(osm->pcx){
797: VecDestroy(&(osm->pcx));
798: }
799: if(osm->pcy){
800: VecDestroy(&(osm->pcy));
801: }
802: if(osm->permutationP){
803: MatDestroy(&(osm->permutationP));
804: }
805: if(osm->pcmat){
806: MatDestroy(&osm->pcmat);
807: }
808: return(0);
809: }
811: static PetscErrorCode PCDestroy_GASM(PC pc)
812: {
813: PC_GASM *osm = (PC_GASM*)pc->data;
815: PetscInt i;
818: PCReset_GASM(pc);
819: /* PCReset_GASM() does not destroy the subdomains if user_subdomains is true, so destroy them here. */
820: PCGASMDestroySubdomains(osm->n,&osm->ois,&osm->iis);
821: if (osm->ksp) {
822: for (i=0; i<osm->n; i++) {
823: KSPDestroy(&osm->ksp[i]);
824: }
825: PetscFree(osm->ksp);
826: }
827: PetscFree(osm->x);
828: PetscFree(osm->y);
829: PetscFree(pc->data);
830: return(0);
831: }
833: static PetscErrorCode PCSetFromOptions_GASM(PetscOptionItems *PetscOptionsObject,PC pc)
834: {
835: PC_GASM *osm = (PC_GASM*)pc->data;
837: PetscInt blocks,ovl;
838: PetscBool symset,flg;
839: PCGASMType gasmtype;
842: /* set the type to symmetric if matrix is symmetric */
843: if (!osm->type_set && pc->pmat) {
844: MatIsSymmetricKnown(pc->pmat,&symset,&flg);
845: if (symset && flg) osm->type = PC_GASM_BASIC;
846: }
847: PetscOptionsHead(PetscOptionsObject,"Generalized additive Schwarz options");
848: PetscOptionsBool("-pc_gasm_use_dm_subdomains","If subdomains aren't set, use DMCreateDomainDecomposition() to define subdomains.","PCGASMSetUseDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
849: PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->N,&blocks,&flg);
850: if (flg) {
851: PCGASMSetTotalSubdomains(pc,blocks);
852: }
853: PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);
854: if (flg) {
855: PCGASMSetOverlap(pc,ovl);
856: osm->dm_subdomains = PETSC_FALSE;
857: }
858: flg = PETSC_FALSE;
859: PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);
860: if (flg) {PCGASMSetType(pc,gasmtype);}
861: PetscOptionsBool("-pc_gasm_use_hierachical_partitioning","use hierarchical partitioning",NULL,osm->hierarchicalpartitioning,&osm->hierarchicalpartitioning,&flg);
862: PetscOptionsTail();
863: return(0);
864: }
866: /*------------------------------------------------------------------------------------*/
868: /*@
869: PCGASMSetTotalSubdomains - sets the total number of subdomains to use across the
870: communicator.
871: Logically collective on pc
873: Input Parameters:
874: + pc - the preconditioner
875: - N - total number of subdomains
878: Level: beginner
880: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
882: .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap()
883: PCGASMCreateSubdomains2D()
884: @*/
885: PetscErrorCode PCGASMSetTotalSubdomains(PC pc,PetscInt N)
886: {
887: PC_GASM *osm = (PC_GASM*)pc->data;
888: PetscMPIInt size,rank;
892: if (N < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be 1 or more, got N = %D",N);
893: if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before calling PCSetUp().");
895: PCGASMDestroySubdomains(osm->n,&osm->iis,&osm->ois);
896: osm->ois = osm->iis = NULL;
898: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
899: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
900: osm->N = N;
901: osm->n = PETSC_DETERMINE;
902: osm->nmax = PETSC_DETERMINE;
903: osm->dm_subdomains = PETSC_FALSE;
904: return(0);
905: }
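/*
   Example usage (an illustrative sketch, not part of the original source; the KSP 'ksp' is
   assumed to have been created and given its operators by the caller):

       PC pc;
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCGASM);
       PCGASMSetTotalSubdomains(pc,16);
       KSPSetUp(ksp);

   This requests 16 subdomains across the whole communicator. The same effect is available from
   the options database via -pc_type gasm -pc_gasm_total_subdomains 16.
*/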
908: static PetscErrorCode PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
909: {
910: PC_GASM *osm = (PC_GASM*)pc->data;
911: PetscErrorCode ierr;
912: PetscInt i;
915: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, got n = %D",n);
916: if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
918: PCGASMDestroySubdomains(osm->n,&osm->iis,&osm->ois);
919: osm->iis = osm->ois = NULL;
920: osm->n = n;
921: osm->N = PETSC_DETERMINE;
922: osm->nmax = PETSC_DETERMINE;
923: if (ois) {
924: PetscMalloc1(n,&osm->ois);
925: for (i=0; i<n; i++) {
926: PetscObjectReference((PetscObject)ois[i]);
927: osm->ois[i] = ois[i];
928: }
929: /*
930: Since the user set the outer subdomains, even if nontrivial overlap was requested via PCGASMSetOverlap(),
931: it will be ignored. To avoid confusion later on (e.g., when viewing the PC), the overlap size is set to -1.
932: */
933: osm->overlap = -1;
934: /* inner subdomains must be provided */
935: if (!iis) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"inner indices have to be provided \n");
936: }/* end if */
937: if (iis) {
938: PetscMalloc1(n,&osm->iis);
939: for (i=0; i<n; i++) {
940: PetscObjectReference((PetscObject)iis[i]);
941: osm->iis[i] = iis[i];
942: }
943: if (!ois) {
944: osm->ois = NULL;
945: /* if the user does not provide outer indices, the corresponding outer indices are created from the requested overlap in PCSetUp_GASM() */
946: }
947: }
948: #if defined(PETSC_USE_DEBUG)
949: {
950: PetscInt j,rstart,rend,*covered,lsize;
951: const PetscInt *indices;
952: /* check if the inner indices cover and only cover the local portion of the preconditioning matrix */
953: MatGetOwnershipRange(pc->pmat,&rstart,&rend);
954: PetscCalloc1(rend-rstart,&covered);
955: /* check that the inner subdomains contain only locally owned indices and do not overlap */
956: for (i=0; i<n; i++) {
957: ISGetIndices(osm->iis[i],&indices);
958: ISGetLocalSize(osm->iis[i],&lsize);
959: for (j=0; j<lsize; j++) {
960: if (indices[j]<rstart || indices[j]>=rend) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"inner subdomains cannot contain an index %D owned by another processor", indices[j]);
961: else if (covered[indices[j]-rstart]==1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"inner subdomains cannot overlap: index %D appears more than once",indices[j]);
962: else covered[indices[j]-rstart] = 1;
963: }
964: ISRestoreIndices(osm->iis[i],&indices);
965: }
966: /* check if we miss any indices */
967: for (i=rstart; i<rend; i++) {
968: if (!covered[i-rstart]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"local entity %D was not covered by the inner subdomains",i);
969: }
970: PetscFree(covered);
971: }
972: #endif
973: if (iis) osm->user_subdomains = PETSC_TRUE;
974: return(0);
975: }
978: static PetscErrorCode PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
979: {
980: PC_GASM *osm = (PC_GASM*)pc->data;
983: if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
984: if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
985: if (!pc->setupcalled) osm->overlap = ovl;
986: return(0);
987: }
989: static PetscErrorCode PCGASMSetType_GASM(PC pc,PCGASMType type)
990: {
991: PC_GASM *osm = (PC_GASM*)pc->data;
994: osm->type = type;
995: osm->type_set = PETSC_TRUE;
996: return(0);
997: }
999: static PetscErrorCode PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
1000: {
1001: PC_GASM *osm = (PC_GASM*)pc->data;
1004: osm->sort_indices = doSort;
1005: return(0);
1006: }
1008: /*
1009: FIXME: This routine might need to be modified now that multiple ranks per subdomain are allowed.
1010: In particular, it would upset the global subdomain number calculation.
1011: */
1012: static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
1013: {
1014: PC_GASM *osm = (PC_GASM*)pc->data;
1018: if (osm->n < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
1020: if (n) *n = osm->n;
1021: if (first) {
1022: MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
1023: *first -= osm->n;
1024: }
1025: if (ksp) {
1026: /* Assume that local solves are now different; not necessarily
1027: true, though! This flag is used only for PCView_GASM() */
1028: *ksp = osm->ksp;
1029: osm->same_subdomain_solvers = PETSC_FALSE;
1030: }
1031: return(0);
1032: } /* PCGASMGetSubKSP_GASM() */
1034: /*@C
1035: PCGASMSetSubdomains - Sets the subdomains for this processor
1036: for the additive Schwarz preconditioner.
1038: Collective on pc
1040: Input Parameters:
1041: + pc - the preconditioner object
1042: . n - the number of subdomains for this processor
1043: . iis - the index sets that define the inner subdomains (or NULL for PETSc to determine subdomains)
1044: - ois - the index sets that define the outer subdomains (or NULL to use the same as iis, or to construct by expanding iis by the requested overlap)
1046: Notes:
1047: The IS indices use the parallel, global numbering of the vector entries.
1048: Inner subdomains are those where the correction is applied.
1049: Outer subdomains are those where the residual necessary to obtain the
1050: corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
1051: Both inner and outer subdomains can extend over several processors.
1052: This processor's portion of a subdomain is known as a local subdomain.
1054: Inner subdomains cannot overlap with each other, cannot contain any entries owned by remote processors,
1055: and have to cover the entire locally owned portion of the matrix. The index sets on each
1056: process should be ordered such that the ith local subdomain is connected to the ith remote subdomain
1057: on another MPI process.
1059: By default the GASM preconditioner uses 1 (local) subdomain per processor.
1062: Level: advanced
1064: .keywords: PC, GASM, set, subdomains, additive Schwarz
1066: .seealso: PCGASMSetNumSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1067: PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
1068: @*/
1069: PetscErrorCode PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
1070: {
1071: PC_GASM *osm = (PC_GASM*)pc->data;
1076: PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));
1077: osm->dm_subdomains = PETSC_FALSE;
1078: return(0);
1079: }
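/*
   Example usage (an illustrative sketch, not part of the original source): one inner subdomain
   per process covering exactly the locally owned rows of the matrix A, with ois == NULL so that
   PCSetUp_GASM() generates the outer subdomains from the requested overlap. The Mat A and PC pc
   are assumed to exist.

       PetscInt rstart,rend;
       IS       iis;
       MatGetOwnershipRange(A,&rstart,&rend);
       ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&iis);
       PCGASMSetSubdomains(pc,1,&iis,NULL);
       ISDestroy(&iis);

   PCGASMSetSubdomains() takes its own references to the index sets, so the caller may drop its
   reference right away, as above.
*/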
1082: /*@
1083: PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
1084: additive Schwarz preconditioner. Either all or no processors in the
1085: pc communicator must call this routine.
1087: Logically Collective on pc
1089: Input Parameters:
1090: + pc - the preconditioner context
1091: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 0)
1093: Options Database Key:
1094: . -pc_gasm_overlap <overlap> - Sets overlap
1096: Notes:
1097: By default the GASM preconditioner uses 1 subdomain per processor. To use
1098: multiple subdomains per processor or "straddling" subdomains that intersect
1099: multiple processors use PCGASMSetSubdomains() (or option -pc_gasm_total_subdomains <n>).
1101: The overlap defaults to 0, so if one desires that no additional
1102: overlap be computed beyond what may have been set with a call to
1103: PCGASMSetSubdomains(), then ovl must be set to be 0. In particular, if one does
1104: not explicitly set the subdomains in application code, then all overlap would be computed
1105: internally by PETSc, and using an overlap of 0 would result in a GASM
1106: variant that is equivalent to the block Jacobi preconditioner.
1108: Note that one can define initial index sets with any overlap via
1109: PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
1110: PETSc to extend that overlap further, if desired.
1112: Level: intermediate
1114: .keywords: PC, GASM, set, overlap
1116: .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1117: PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
1118: @*/
1119: PetscErrorCode PCGASMSetOverlap(PC pc,PetscInt ovl)
1120: {
1122: PC_GASM *osm = (PC_GASM*)pc->data;
1127: PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
1128: osm->dm_subdomains = PETSC_FALSE;
1129: return(0);
1130: }
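/*
   Example usage (an illustrative sketch, not part of the original source): let PETSc build the
   subdomains and extend them by two layers of matrix-graph overlap.

       PCSetType(pc,PCGASM);
       PCGASMSetTotalSubdomains(pc,8);
       PCGASMSetOverlap(pc,2);

   Equivalently: -pc_type gasm -pc_gasm_total_subdomains 8 -pc_gasm_overlap 2.
*/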
1132: /*@
1133: PCGASMSetType - Sets the type of restriction and interpolation used
1134: for local problems in the additive Schwarz method.
1136: Logically Collective on PC
1138: Input Parameters:
1139: + pc - the preconditioner context
1140: - type - variant of GASM, one of
1141: .vb
1142: PC_GASM_BASIC - full interpolation and restriction
1143: PC_GASM_RESTRICT - full restriction, local processor interpolation
1144: PC_GASM_INTERPOLATE - full interpolation, local processor restriction
1145: PC_GASM_NONE - local processor restriction and interpolation
1146: .ve
1148: Options Database Key:
1149: . -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1151: Level: intermediate
1153: .keywords: PC, GASM, set, type
1155: .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1156: PCGASMCreateSubdomains2D()
1157: @*/
1158: PetscErrorCode PCGASMSetType(PC pc,PCGASMType type)
1159: {
1165: PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));
1166: return(0);
1167: }
1169: /*@
1170: PCGASMSetSortIndices - Determines whether subdomain indices are sorted.
1172: Logically Collective on PC
1174: Input Parameters:
1175: + pc - the preconditioner context
1176: - doSort - sort the subdomain indices
1178: Level: intermediate
1180: .keywords: PC, GASM, set, type
1182: .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1183: PCGASMCreateSubdomains2D()
1184: @*/
1185: PetscErrorCode PCGASMSetSortIndices(PC pc,PetscBool doSort)
1186: {
1192: PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1193: return(0);
1194: }
1196: /*@C
1197: PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
1198: this processor.
1200: Collective on PC iff first_local is requested
1202: Input Parameter:
1203: . pc - the preconditioner context
1205: Output Parameters:
1206: + n_local - the number of blocks on this processor or NULL
1207: . first_local - the global number of the first block on this processor or NULL,
1208: all processors must request or all must pass NULL
1209: - ksp - the array of KSP contexts
1211: Note:
1212: After PCGASMGetSubKSP() the array of KSP contexts is not to be freed by the caller.
1214: Currently for some matrix implementations only 1 block per processor
1215: is supported.
1217: You must call KSPSetUp() before calling PCGASMGetSubKSP().
1219: Level: advanced
1221: .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context
1223: .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(),
1224: PCGASMCreateSubdomains2D(),
1225: @*/
1226: PetscErrorCode PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1227: {
1232: PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1233: return(0);
1234: }
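/*
   Example usage (an illustrative sketch, not part of the original source): after setup, fetch the
   local sub-KSPs and switch each local block to an ILU preconditioner.

       KSP      *subksp;
       PC        subpc;
       PetscInt  i,nlocal,first;
       KSPSetUp(ksp);
       PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);
       for (i = 0; i < nlocal; i++) {
         KSPGetPC(subksp[i],&subpc);
         PCSetType(subpc,PCILU);
       }

   The array returned in subksp is owned by the preconditioner and must not be freed by the caller.
*/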
1236: /* -------------------------------------------------------------------------------------*/
1237: /*MC
1238: PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1239: its own KSP object.
1241: Options Database Keys:
1242: + -pc_gasm_total_subdomains <n> - Sets total number of local subdomains to be distributed among processors
1243: . -pc_gasm_view_subdomains - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1244: . -pc_gasm_print_subdomains - activates the printing of subdomain indices in PCSetUp()
1245: . -pc_gasm_overlap <ovl> - Sets overlap by which to (automatically) extend local subdomains
1246: - -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1248: IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1249: will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1250: -pc_gasm_type basic to use the standard GASM.
1252: Notes:
1253: Blocks can be shared by multiple processes.
1255: To set options on the solvers for each block append -sub_ to all the KSP, and PC
1256: options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1258: To set the options on the solvers separate for each block call PCGASMGetSubKSP()
1259: and set the options directly on the resulting KSP object (you can access its PC
1260: with KSPGetPC())
1263: Level: beginner
1265: Concepts: additive Schwarz method
1267: References:
1268: + 1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1269: Courant Institute, New York University Technical report
1270: - 2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1271: Cambridge University Press.
1273: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
1274: PCBJACOBI, PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1275: PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1277: M*/
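/*
   Minimal command-line sketch (illustrative, not part of the original source; the executable name
   is a placeholder): solve with GMRES, GASM with 8 subdomains and overlap 1, and ILU on each block.

       mpiexec -n 4 ./myapp -ksp_type gmres -pc_type gasm \
               -pc_gasm_total_subdomains 8 -pc_gasm_overlap 1 \
               -sub_ksp_type preonly -sub_pc_type ilu -ksp_view

   All of the options shown correspond to the database keys documented in the manual page above.
*/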
1279: PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc)
1280: {
1282: PC_GASM *osm;
1285: PetscNewLog(pc,&osm);
1287: osm->N = PETSC_DETERMINE;
1288: osm->n = PETSC_DECIDE;
1289: osm->nmax = PETSC_DETERMINE;
1290: osm->overlap = 0;
1291: osm->ksp = 0;
1292: osm->gorestriction = 0;
1293: osm->girestriction = 0;
1294: osm->pctoouter = 0;
1295: osm->gx = 0;
1296: osm->gy = 0;
1297: osm->x = 0;
1298: osm->y = 0;
1299: osm->pcx = 0;
1300: osm->pcy = 0;
1301: osm->permutationIS = 0;
1302: osm->permutationP = 0;
1303: osm->pcmat = 0;
1304: osm->ois = 0;
1305: osm->iis = 0;
1306: osm->pmat = 0;
1307: osm->type = PC_GASM_RESTRICT;
1308: osm->same_subdomain_solvers = PETSC_TRUE;
1309: osm->sort_indices = PETSC_TRUE;
1310: osm->dm_subdomains = PETSC_FALSE;
1311: osm->hierarchicalpartitioning = PETSC_FALSE;
1313: pc->data = (void*)osm;
1314: pc->ops->apply = PCApply_GASM;
1315: pc->ops->applytranspose = PCApplyTranspose_GASM;
1316: pc->ops->setup = PCSetUp_GASM;
1317: pc->ops->reset = PCReset_GASM;
1318: pc->ops->destroy = PCDestroy_GASM;
1319: pc->ops->setfromoptions = PCSetFromOptions_GASM;
1320: pc->ops->setuponblocks = PCSetUpOnBlocks_GASM;
1321: pc->ops->view = PCView_GASM;
1322: pc->ops->applyrichardson = 0;
1324: PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSubdomains_C",PCGASMSetSubdomains_GASM);
1325: PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetOverlap_C",PCGASMSetOverlap_GASM);
1326: PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetType_C",PCGASMSetType_GASM);
1327: PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSortIndices_C",PCGASMSetSortIndices_GASM);
1328: PetscObjectComposeFunction((PetscObject)pc,"PCGASMGetSubKSP_C",PCGASMGetSubKSP_GASM);
1329: return(0);
1330: }
1333: PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[])
1334: {
1335: MatPartitioning mpart;
1336: const char *prefix;
1337: PetscInt i,j,rstart,rend,bs;
1338: PetscBool hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1339: Mat Ad = NULL, adj;
1340: IS ispart,isnumb,*is;
1341: PetscErrorCode ierr;
1344: if (nloc < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local subdomains must be > 0, got nloc = %D",nloc);
1346: /* Get prefix, row distribution, and block size */
1347: MatGetOptionsPrefix(A,&prefix);
1348: MatGetOwnershipRange(A,&rstart,&rend);
1349: MatGetBlockSize(A,&bs);
1350: if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1352: /* Get diagonal block from matrix if possible */
1353: MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);
1354: if (hasop) {
1355: MatGetDiagonalBlock(A,&Ad);
1356: }
1357: if (Ad) {
1358: PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1359: if (!isbaij) {PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1360: }
1361: if (Ad && nloc > 1) {
1362: PetscBool match,done;
1363: /* Try to setup a good matrix partitioning if available */
1364: MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1365: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1366: MatPartitioningSetFromOptions(mpart);
1367: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1368: if (!match) {
1369: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1370: }
1371: if (!match) { /* assume a "good" partitioner is available */
1372: PetscInt na;
1373: const PetscInt *ia,*ja;
1374: MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1375: if (done) {
1376: /* Build adjacency matrix by hand. Unfortunately a call to
1377: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1378: remove the block-aij structure and we cannot expect
1379: MatPartitioning to split vertices as we need */
1380: PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1381: const PetscInt *row;
1382: nnz = 0;
1383: for (i=0; i<na; i++) { /* count number of nonzeros */
1384: len = ia[i+1] - ia[i];
1385: row = ja + ia[i];
1386: for (j=0; j<len; j++) {
1387: if (row[j] == i) { /* don't count diagonal */
1388: len--; break;
1389: }
1390: }
1391: nnz += len;
1392: }
1393: PetscMalloc1(na+1,&iia);
1394: PetscMalloc1(nnz,&jja);
1395: nnz = 0;
1396: iia[0] = 0;
1397: for (i=0; i<na; i++) { /* fill adjacency */
1398: cnt = 0;
1399: len = ia[i+1] - ia[i];
1400: row = ja + ia[i];
1401: for (j=0; j<len; j++) {
1402: if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */
1403: }
1404: nnz += cnt;
1405: iia[i+1] = nnz;
1406: }
1407: /* Partitioning of the adjacency matrix */
1408: MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1409: MatPartitioningSetAdjacency(mpart,adj);
1410: MatPartitioningSetNParts(mpart,nloc);
1411: MatPartitioningApply(mpart,&ispart);
1412: ISPartitioningToNumbering(ispart,&isnumb);
1413: MatDestroy(&adj);
1414: foundpart = PETSC_TRUE;
1415: }
1416: MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1417: }
1418: MatPartitioningDestroy(&mpart);
1419: }
1420: PetscMalloc1(nloc,&is);
1421: if (!foundpart) {
1423: /* Partitioning by contiguous chunks of rows */
1425: PetscInt mbs = (rend-rstart)/bs;
1426: PetscInt start = rstart;
1427: for (i=0; i<nloc; i++) {
1428: PetscInt count = (mbs/nloc + ((mbs % nloc) > i)) * bs;
1429: ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1430: start += count;
1431: }
1433: } else {
1435: /* Partitioning by adjacency of diagonal block */
1437: const PetscInt *numbering;
1438: PetscInt *count,nidx,*indices,*newidx,start=0;
1439: /* Get node count in each partition */
1440: PetscMalloc1(nloc,&count);
1441: ISPartitioningCount(ispart,nloc,count);
1442: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1443: for (i=0; i<nloc; i++) count[i] *= bs;
1444: }
1445: /* Build indices from node numbering */
1446: ISGetLocalSize(isnumb,&nidx);
1447: PetscMalloc1(nidx,&indices);
1448: for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1449: ISGetIndices(isnumb,&numbering);
1450: PetscSortIntWithPermutation(nidx,numbering,indices);
1451: ISRestoreIndices(isnumb,&numbering);
1452: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1453: PetscMalloc1(nidx*bs,&newidx);
1454: for (i=0; i<nidx; i++) {
1455: for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1456: }
1457: PetscFree(indices);
1458: nidx *= bs;
1459: indices = newidx;
1460: }
1461: /* Shift to get global indices */
1462: for (i=0; i<nidx; i++) indices[i] += rstart;
1464: /* Build the index sets for each block */
1465: for (i=0; i<nloc; i++) {
1466: ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1467: ISSort(is[i]);
1468: start += count[i];
1469: }
1471: PetscFree(count);
1472: PetscFree(indices);
1473: ISDestroy(&isnumb);
1474: ISDestroy(&ispart);
1475: }
1476: *iis = is;
1477: return(0);
1478: }
1480: PETSC_INTERN PetscErrorCode PCGASMCreateStraddlingSubdomains(Mat A,PetscInt N,PetscInt *n,IS *iis[])
1481: {
1482: PetscErrorCode ierr;
1485: MatSubdomainsCreateCoalesce(A,N,n,iis);
1486: return(0);
1487: }
1491: /*@C
1492: PCGASMCreateSubdomains - Creates n index sets defining n nonoverlapping subdomains for the additive
1493: Schwarz preconditioner for any problem based on its matrix.
1495: Collective
1497: Input Parameters:
1498: + A - The global matrix operator
1499: - N - the number of global subdomains requested
1501: Output Parameters:
1502: + n - the number of subdomains created on this processor
1503: - iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1505: Level: advanced
1507: Note: When N >= A's communicator size, each subdomain is local -- contained within a single processor.
1508: When N < size, the subdomains are 'straddling' (processor boundaries) and are no longer local.
1509: The resulting subdomains can be used in PCGASMSetSubdomains(pc,n,iis,NULL). The overlapping
1510: outer subdomains will be automatically generated from these according to the requested amount of
1511: overlap; this is currently supported only with local subdomains.
1514: .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1516: .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1517: @*/
1518: PetscErrorCode PCGASMCreateSubdomains(Mat A,PetscInt N,PetscInt *n,IS *iis[])
1519: {
1520: PetscMPIInt size;
1521: PetscErrorCode ierr;
1527: if (N < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Number of subdomains must be > 0, N = %D",N);
1528: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1529: if (N >= size) {
1530: *n = N/size + (N%size);
1531: PCGASMCreateLocalSubdomains(A,*n,iis);
1532: } else {
1533: PCGASMCreateStraddlingSubdomains(A,N,n,iis);
1534: }
1535: return(0);
1536: }
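/*
   Example usage (an illustrative sketch, not part of the original source): create nonoverlapping
   subdomains from the matrix, hand them to the preconditioner, and release the caller's copies.

       PetscInt n;
       IS      *iis;
       PCGASMCreateSubdomains(A,16,&n,&iis);
       PCGASMSetSubdomains(pc,n,iis,NULL);
       PCGASMDestroySubdomains(n,&iis,NULL);

   PCGASMSetSubdomains() references the index sets it keeps, so destroying them here only drops
   the caller's references.
*/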
1538: /*@C
1539: PCGASMDestroySubdomains - Destroys the index sets created with
1540: PCGASMCreateSubdomains() or PCGASMCreateSubdomains2D(). Should be
1541: called after setting subdomains with PCGASMSetSubdomains().
1543: Collective
1545: Input Parameters:
1546: + n - the number of index sets
1547: . iis - the array of inner subdomains
1548: - ois - the array of outer subdomains, can be NULL
1550: Level: intermediate
1552: Notes:
1553: this is merely a convenience subroutine that walks each list,
1554: destroys each IS on the list, and then frees the list. At the end the
1555: list pointers are set to NULL.
1557: .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1559: .seealso: PCGASMCreateSubdomains(), PCGASMSetSubdomains()
1560: @*/
1561: PetscErrorCode PCGASMDestroySubdomains(PetscInt n,IS **iis,IS **ois)
1562: {
1563: PetscInt i;
1567: if (n <= 0) return(0);
1568: if (ois) {
1570: if (*ois) {
1572: for (i=0; i<n; i++) {
1573: ISDestroy(&(*ois)[i]);
1574: }
1575: PetscFree((*ois));
1576: }
1577: }
1578: if (iis) {
1580: if (*iis) {
1582: for (i=0; i<n; i++) {
1583: ISDestroy(&(*iis)[i]);
1584: }
1585: PetscFree((*iis));
1586: }
1587: }
1588: return(0);
1589: }
1592: #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
1593: { \
1594: PetscInt first_row = first/M, last_row = last/M+1; \
1595: /* \
1596: Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners \
1597: of the bounding box of the intersection of the subdomain with the local ownership range (local \
1598: subdomain). \
1599: Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows \
1600: of the intersection. \
1601: */ \
1602: /* ylow_loc is the grid row containing the first element of the local subdomain */ \
1603: *ylow_loc = PetscMax(first_row,ylow); \
1604: /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1605: *xleft_loc = *ylow_loc==first_row ? PetscMax(first%M,xleft) : xleft; \
1606: /* yhigh_loc is the grid row above the last local subdomain element */ \
1607: *yhigh_loc = PetscMin(last_row,yhigh); \
1608: /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1609: *xright_loc = *yhigh_loc==last_row ? PetscMin(xright,last%M) : xright; \
1610: /* Now compute the size of the local subdomain n. */ \
1611: *n = 0; \
1612: if (*ylow_loc < *yhigh_loc) { \
1613: PetscInt width = xright-xleft; \
1614: *n += width*(*yhigh_loc-*ylow_loc-1); \
1615: *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
1616: *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
1617: } \
1618: }
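/*
  Worked example of the macro above (illustrative numbers only, not taken from any PETSc test):
  take M = 8, local ownership range [first,last) = [20,44), and a subdomain with xleft = 2,
  xright = 6, ylow = 1, yhigh = 5. Then first_row = 2 and last_row = 6, so
     ylow_loc   = max(2,1)    = 2
     xleft_loc  = max(20%8,2) = 4   (since ylow_loc == first_row)
     yhigh_loc  = min(6,5)    = 5
     xright_loc = 6                 (since yhigh_loc != last_row)
  and n = 4*(5-2-1) + min(6-2,4) - min(4-2,4) = 8 + 4 - 2 = 10, which matches a direct count of
  the locally owned grid points inside the subdomain: 2 in row 2 and 4 in each of rows 3 and 4.
*/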
1622: /*@
1623: PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1624: preconditioner for a two-dimensional problem on a regular grid.
1626: Collective
1628: Input Parameters:
1629: + pc - the preconditioner context
.  M, N - the global number of grid points in the x and y directions
1630: . Mdomains, Ndomains - the global number of subdomains in the x and y directions
1631: . dof - degrees of freedom per node
1632: - overlap - overlap in mesh lines
1634: Output Parameters:
1635: + nsub - the number of local subdomains created
1636: . iis - array of index sets defining inner (nonoverlapping) subdomains
1637: - ois - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1640: Level: advanced
1642: .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1644: .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(), PCGASMSetOverlap()
1645: @*/
1646: PetscErrorCode PCGASMCreateSubdomains2D(PC pc,PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap,PetscInt *nsub,IS **iis,IS **ois)
1647: {
1649: PetscMPIInt size, rank;
1650: PetscInt i, j;
1651: PetscInt maxheight, maxwidth;
1652: PetscInt xstart, xleft, xright, xleft_loc, xright_loc;
1653: PetscInt ystart, ylow, yhigh, ylow_loc, yhigh_loc;
1654: PetscInt x[2][2], y[2][2], n[2];
1655: PetscInt first, last;
1656: PetscInt nidx, *idx;
1657: PetscInt ii,jj,s,q,d;
1658: PetscInt k,kk;
1659: PetscMPIInt color;
1660: MPI_Comm comm, subcomm;
1661: IS **xis = 0, **is = ois, **is_local = iis;
1664: PetscObjectGetComm((PetscObject)pc, &comm);
1665: MPI_Comm_size(comm, &size);
1666: MPI_Comm_rank(comm, &rank);
1667: MatGetOwnershipRange(pc->pmat, &first, &last);
1668: if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1669: "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1671: /* Determine the number of domains with nonzero intersections with the local ownership range. */
1672: s = 0;
1673: ystart = 0;
1674: for (j=0; j<Ndomains; ++j) {
1675: maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1676: if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1677: /* Vertical domain limits with an overlap. */
1678: ylow = PetscMax(ystart - overlap,0);
1679: yhigh = PetscMin(ystart + maxheight + overlap,N);
1680: xstart = 0;
1681: for (i=0; i<Mdomains; ++i) {
1682: maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1683: if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1684: /* Horizontal domain limits with an overlap. */
1685: xleft = PetscMax(xstart - overlap,0);
1686: xright = PetscMin(xstart + maxwidth + overlap,M);
1687: /*
1688: Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1689: */
1690: PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1691: if (nidx) ++s;
1692: xstart += maxwidth;
1693: } /* for (i = 0; i < Mdomains; ++i) */
1694: ystart += maxheight;
1695: } /* for (j = 0; j < Ndomains; ++j) */
1697: /* Now we can allocate the necessary number of ISs. */
1698: *nsub = s;
1699: PetscMalloc1(*nsub,is);
1700: PetscMalloc1(*nsub,is_local);
1701: s = 0;
1702: ystart = 0;
1703: for (j=0; j<Ndomains; ++j) {
1704: maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1705: if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1706: /* Vertical domain limits with an overlap. */
1707: y[0][0] = PetscMax(ystart - overlap,0);
1708: y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1709: /* Vertical domain limits without an overlap. */
1710: y[1][0] = ystart;
1711: y[1][1] = PetscMin(ystart + maxheight,N);
1712: xstart = 0;
1713: for (i=0; i<Mdomains; ++i) {
1714: maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1715: if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1716: /* Horizontal domain limits with an overlap. */
1717: x[0][0] = PetscMax(xstart - overlap,0);
1718: x[0][1] = PetscMin(xstart + maxwidth + overlap,M);
1719: /* Horizontal domain limits without an overlap. */
1720: x[1][0] = xstart;
1721: x[1][1] = PetscMin(xstart+maxwidth,M);
1722: /*
1723: Determine whether this domain intersects this processor's ownership range of pc->pmat.
1724: Do this twice: first for the domains with overlaps, and once without.
1725: During the first pass create the subcommunicators, and use them on the second pass as well.
1726: */
1727: for (q = 0; q < 2; ++q) {
1728: PetscBool split = PETSC_FALSE;
1729: /*
1730: Domain limits (xleft, xright) and (ylow, yhigh) are adjusted
1731: according to whether the domain with or without an overlap is considered.
1732: */
1733: xleft = x[q][0]; xright = x[q][1];
1734: ylow = y[q][0]; yhigh = y[q][1];
1735: PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1736: nidx *= dof;
1737: n[q] = nidx;
1738: /*
1739: Based on the counted number of indices in the local domain *with an overlap*,
1740: construct a subcommunicator of all the processors supporting this domain.
1741: Observe that a domain with an overlap might have nontrivial local support,
1742: while the domain without an overlap might not. Hence, the decision to participate
1743: in the subcommunicator must be based on the domain with an overlap.
1744: */
1745: if (q == 0) {
1746: if (nidx) color = 1;
1747: else color = MPI_UNDEFINED;
1748: MPI_Comm_split(comm, color, rank, &subcomm);
1749: split = PETSC_TRUE;
1750: }
1751: /*
1752: Proceed only if the number of local indices *with an overlap* is nonzero.
1753: */
1754: if (n[0]) {
1755: if (q == 0) xis = is;
1756: if (q == 1) {
1757: /*
1758: The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1759: Moreover, if the overlap is zero, the two ISs are identical.
1760: */
1761: if (overlap == 0) {
1762: (*is_local)[s] = (*is)[s];
1763: PetscObjectReference((PetscObject)(*is)[s]);
1764: continue;
1765: } else {
1766: xis = is_local;
1767: subcomm = ((PetscObject)(*is)[s])->comm;
1768: }
1769: } /* if (q == 1) */
1770: idx = NULL;
1771: PetscMalloc1(nidx,&idx);
1772: if (nidx) {
1773: k = 0;
1774: for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1775: PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft;
1776: PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright;
1777: kk = dof*(M*jj + x0);
1778: for (ii=x0; ii<x1; ++ii) {
1779: for (d = 0; d < dof; ++d) {
1780: idx[k++] = kk++;
1781: }
1782: }
1783: }
1784: }
1785: ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);
1786: if (split) {
1787: MPI_Comm_free(&subcomm);
1788: }
1789: }/* if (n[0]) */
1790: }/* for (q = 0; q < 2; ++q) */
1791: if (n[0]) ++s;
1792: xstart += maxwidth;
1793: } /* for (i = 0; i < Mdomains; ++i) */
1794: ystart += maxheight;
1795: } /* for (j = 0; j < Ndomains; ++j) */
1796: return(0);
1797: }
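/*
  A minimal usage sketch (not part of the PETSc sources; error checking omitted). It assumes a
  scalar problem (dof = 1) on an M x N grid and a pc already of type PCGASM, and requests a
  2 x 2 decomposition with one mesh line of overlap:

     PetscInt nsub;
     IS       *iis,*ois;
     PCGASMCreateSubdomains2D(pc,M,N,2,2,1,1,&nsub,&iis,&ois);
     PCGASMSetSubdomains(pc,nsub,iis,ois);
     PCGASMDestroySubdomains(nsub,&iis,&ois);
*/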
1799: /*@C
1800: PCGASMGetSubdomains - Gets the subdomains supported on this processor
1801: for the additive Schwarz preconditioner.
1803: Not Collective
1805: Input Parameter:
1806: . pc - the preconditioner context
1808: Output Parameters:
1809: + n - the number of subdomains for this processor (default value = 1)
1810: . iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be NULL)
1811: - ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be NULL)
1814: Notes:
1815: The user is responsible for destroying the ISs and freeing the returned arrays.
1816: The IS numbering is in the parallel, global numbering of the vector.
1818: Level: advanced
1820: .keywords: PC, GASM, get, subdomains, additive Schwarz
1822: .seealso: PCGASMSetOverlap(), PCGASMGetSubKSP(), PCGASMCreateSubdomains2D(),
1823: PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1824: @*/
1825: PetscErrorCode PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1826: {
1827: PC_GASM *osm;
1829: PetscBool match;
1830: PetscInt i;
1834: PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1835: if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1836: osm = (PC_GASM*)pc->data;
1837: if (n) *n = osm->n;
1838: if (iis) {
1839: PetscMalloc1(osm->n, iis);
1840: }
1841: if (ois) {
1842: PetscMalloc1(osm->n, ois);
1843: }
1844: if (iis || ois) {
1845: for (i = 0; i < osm->n; ++i) {
1846: if (iis) (*iis)[i] = osm->iis[i];
1847: if (ois) (*ois)[i] = osm->ois[i];
1848: }
1849: }
1850: return(0);
1851: }
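/*
  A minimal usage sketch (not part of the PETSc sources; error checking omitted): query the
  subdomains and free the arrays allocated by the call (see the notes above for ownership of
  the ISs themselves):

     PetscInt n,i,len;
     IS       *iis,*ois;
     PCGASMGetSubdomains(pc,&n,&iis,&ois);
     for (i=0; i<n; i++) { ISGetLocalSize(iis[i],&len); }
     PetscFree(iis);
     PetscFree(ois);
*/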
1853: /*@C
1854: PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1855: only) for the additive Schwarz preconditioner.
1857: Not Collective
1859: Input Parameter:
1860: . pc - the preconditioner context
1862: Output Parameters:
1863: + n - the number of matrices for this processor (default value = 1)
1864: - mat - the matrices
1866: Notes:
1867: Matrices returned by this routine have the same communicators as the index sets (IS)
1868: used to define the subdomains in PCGASMSetSubdomains().
1869: Level: advanced
1871: .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1873: .seealso: PCGASMSetOverlap(), PCGASMGetSubKSP(),
1874: PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1875: @*/
1876: PetscErrorCode PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1877: {
1878: PC_GASM *osm;
1880: PetscBool match;
1886: if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must be called after KSPSetUp() or PCSetUp().");
1887: PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1888: if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1889: osm = (PC_GASM*)pc->data;
1890: if (n) *n = osm->n;
1891: if (mat) *mat = osm->pmat;
1892: return(0);
1893: }
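/*
  A minimal usage sketch (not part of the PETSc sources; error checking omitted): retrieve the
  subdomain matrices once the preconditioner has been set up, e.g. to inspect their sizes. The
  returned array is the PC's internal array and is not freed by the caller:

     PetscInt n,i,m;
     Mat      *sub;
     PCSetUp(pc);
     PCGASMGetSubmatrices(pc,&n,&sub);
     for (i=0; i<n; i++) { MatGetSize(sub[i],&m,NULL); }
*/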
1895: /*@
1896: PCGASMSetUseDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1897: Logically Collective
1899: Input Parameters:
1900: + pc - the preconditioner
1901: - flg - boolean indicating whether to use subdomains defined by the DM
1903: Options Database Key:
1904: . -pc_gasm_dm_subdomains - use subdomains defined by the DM (overridden by -pc_gasm_total_subdomains and -pc_gasm_overlap)
1906: Level: intermediate
1908: Notes:
1909: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains() and PCGASMSetOverlap() take precedence over PCGASMSetUseDMSubdomains():
1910: calling PCGASMSetSubdomains() with nontrivial subdomain ISs, or calling either of the other two routines,
1911: automatically turns off the use of DM-defined subdomains.
1913: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1915: .seealso: PCGASMGetUseDMSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap()
1916: PCGASMCreateSubdomains2D()
1917: @*/
1918: PetscErrorCode PCGASMSetUseDMSubdomains(PC pc,PetscBool flg)
1919: {
1920: PC_GASM *osm = (PC_GASM*)pc->data;
1922: PetscBool match;
1927: if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1928: PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1929: if (match) {
1930: if (!osm->user_subdomains && osm->N == PETSC_DETERMINE && osm->overlap < 0) {
1931: osm->dm_subdomains = flg;
1932: }
1933: }
1934: return(0);
1935: }
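/*
  A minimal usage sketch (not part of the PETSc sources; error checking omitted). It assumes a
  DM, e.g. a DMDA, has been attached to the KSP so that DMCreateDomainDecomposition() can supply
  the subdomains; the call must precede PCSetUp():

     KSPSetDM(ksp,dm);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGASM);
     PCGASMSetUseDMSubdomains(pc,PETSC_TRUE);
*/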
1937: /*@
1938: PCGASMGetUseDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1939: Not Collective
1941: Input Parameter:
1942: . pc - the preconditioner
1944: Output Parameter:
1945: . flg - boolean indicating whether to use subdomains defined by the DM
1947: Level: intermediate
1949: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1951: .seealso: PCGASMSetUseDMSubdomains(), PCGASMSetOverlap()
1952: PCGASMCreateSubdomains2D()
1953: @*/
1954: PetscErrorCode PCGASMGetUseDMSubdomains(PC pc,PetscBool* flg)
1955: {
1956: PC_GASM *osm = (PC_GASM*)pc->data;
1958: PetscBool match;
1963: PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1964: if (match) {
1965: if (flg) *flg = osm->dm_subdomains;
1966: }
1967: return(0);
1968: }