Actual source code: gasm.c

petsc-3.4.5 2014-06-29
  1: /*
  2:   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
  3:   In this version each processor may intersect multiple subdomains and any subdomain may
  4:   intersect multiple processors.  Intersections of subdomains with processors are called *local
  5:   subdomains*.

  7:        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
  8:        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
  9:        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
 10: */
 11: #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
 12: #include <petscdm.h>

 14: typedef struct {
 15:   PetscInt   N,n,nmax;
 16:   PetscInt   overlap;             /* overlap requested by user */
 17:   KSP        *ksp;                /* linear solvers for each block */
 18:   Vec        gx,gy;               /* Merged work vectors */
 19:   Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
 20:   VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
 21:   VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
 22:   IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
 23:   IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
 24:   Mat        *pmat;               /* subdomain block matrices */
 25:   PCGASMType type;                /* use reduced interpolation, restriction or both */
 26:   PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
 27:   PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
 28:   PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are same */
 29:   PetscBool  sort_indices;           /* flag to sort subdomain indices */
 30:   PetscBool  dm_subdomains;          /* whether DM is allowed to define subdomains */
 31: } PC_GASM;
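/*
   For instance (illustrative numbers only): with 2 processes and three subdomains, where one
   subdomain straddles both processes and the other two live on a single process each, every
   process intersects 2 subdomains, so n = 2 on both processes, nmax = 2, and
   N = sum of the n's = 4 local subdomains in total.
*/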

 35: static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
 36: {
 37:   PC_GASM        *osm = (PC_GASM*)pc->data;
 38:   PetscInt       j,nidx;
 39:   const PetscInt *idx;
 40:   PetscViewer    sviewer;
 41:   char           *cidx;

 45:   if (i < 0 || i >= osm->n) SETERRQ2(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must be nonnegative and less than %D", i, osm->n);
 46:   /* Inner subdomains. */
 47:   ISGetLocalSize(osm->iis[i], &nidx);
 48:   /*
 49:    No more than 15 characters per index plus a space.
 50:    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
 51:    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
 52:    For nidx == 0, the whole string is 16 '\0' characters.
 53:    */
 54:   PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);
 55:   PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);
 56:   ISGetIndices(osm->iis[i], &idx);
 57:   for (j = 0; j < nidx; ++j) {
 58:     PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);
 59:   }
 60:   ISRestoreIndices(osm->iis[i],&idx);
 61:   PetscViewerDestroy(&sviewer);
 62:   PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");
 63:   PetscViewerFlush(viewer);
 64:   PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 65:   PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);
 66:   PetscViewerFlush(viewer);
 67:   PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 68:   PetscViewerASCIIPrintf(viewer, "\n");
 69:   PetscViewerFlush(viewer);
 70:   PetscFree(cidx);
 71:   /* Outer subdomains. */
 72:   ISGetLocalSize(osm->ois[i], &nidx);
 73:   /*
 74:    No more than 15 characters per index plus a space.
 75:    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
 76:    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
 77:    For nidx == 0, the whole string is 16 '\0' characters.
 78:    */
 79:   PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);
 80:   PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);
 81:   ISGetIndices(osm->ois[i], &idx);
 82:   for (j = 0; j < nidx; ++j) {
 83:     PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);
 84:   }
 85:   PetscViewerDestroy(&sviewer);
 86:   ISRestoreIndices(osm->ois[i],&idx);
 87:   PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");
 88:   PetscViewerFlush(viewer);
 89:   PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 90:   PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);
 91:   PetscViewerFlush(viewer);
 92:   PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 93:   PetscViewerASCIIPrintf(viewer, "\n");
 94:   PetscViewerFlush(viewer);
 95:   PetscFree(cidx);
 96:   return(0);
 97: }

101: static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
102: {
103:   PC_GASM        *osm = (PC_GASM*)pc->data;
104:   const char     *prefix;
105:   char           fname[PETSC_MAX_PATH_LEN+1];
106:   PetscInt       i, l, d, count, gcount, *permutation, *numbering;
107:   PetscBool      found;
108:   PetscViewer    viewer, sviewer = NULL;

112:   PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);
113:   for (i = 0; i < osm->n; ++i) permutation[i] = i;
114:   PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);
115:   PetscSortIntWithPermutation(osm->n, numbering, permutation);
116:   PCGetOptionsPrefix(pc,&prefix);
117:   PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);
118:   if (!found) { PetscStrcpy(fname,"stdout"); };
119:   PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
120:   /*
121:    Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
122:    the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
123:   */
124:   PetscObjectName((PetscObject)viewer);
125:   l    = 0;
126:   for (count = 0; count < gcount; ++count) {
127:     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
128:     if (l<osm->n) {
129:       d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
130:       if (numbering[d] == count) {
131:         PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
132:         PCGASMSubdomainView_Private(pc,d,sviewer);
133:         PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
134:         ++l;
135:       }
136:     }
137:     MPI_Barrier(PetscObjectComm((PetscObject)pc));
138:   }
139:   PetscViewerDestroy(&viewer);
140:   PetscFree2(permutation,numbering);
141:   return(0);
142: }


147: static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
148: {
149:   PC_GASM        *osm = (PC_GASM*)pc->data;
150:   const char     *prefix;
152:   PetscMPIInt    rank, size;
153:   PetscInt       i,bsz;
154:   PetscBool      iascii,view_subdomains=PETSC_FALSE;
155:   PetscViewer    sviewer;
156:   PetscInt       count, l, gcount, *numbering, *permutation;
157:   char           overlap[256]     = "user-defined overlap";
158:   char           gsubdomains[256] = "unknown total number of subdomains";
159:   char           lsubdomains[256] = "unknown number of local subdomains";
160:   char           msubdomains[256] = "unknown max number of local subdomains";

163:   MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
164:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
165:   PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);
166:   for (i = 0; i < osm->n; ++i) permutation[i] = i;
167:   PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);
168:   PetscSortIntWithPermutation(osm->n, numbering, permutation);

170:   if (osm->overlap >= 0) {
171:     PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);
172:   }
173:   PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);
174:   if (osm->N > 0) {
175:     PetscSNPrintf(lsubdomains, sizeof(lsubdomains), "number of local subdomains = %D",osm->N);
176:   }
177:   if (osm->nmax > 0) {
178:     PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);
179:   }

181:   PCGetOptionsPrefix(pc,&prefix);
182:   PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,NULL);

184:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
185:   if (iascii) {
186:     /*
187:      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
188:      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
189:      collectively on the comm.
190:      */
191:     PetscObjectName((PetscObject)viewer);
192:     PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");
193:     PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);
194:     PetscViewerASCIIPrintf(viewer,"%s\n",overlap);
195:     PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);
196:     PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);
197:     PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);
198:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
199:     PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);
200:     PetscViewerFlush(viewer);
201:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
202:     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
203:     PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");
204:     PetscViewerASCIIPushTab(viewer);
205:     PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
206:     /* Make sure that everybody waits for the banner to be printed. */
207:     MPI_Barrier(PetscObjectComm((PetscObject)viewer));
208:     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
209:     l = 0;
210:     for (count = 0; count < gcount; ++count) {
211:       PetscMPIInt srank, ssize;
212:       if (l<osm->n) {
213:         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
214:         if (numbering[d] == count) {
215:           MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);
216:           MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);
217:           PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
218:           ISGetLocalSize(osm->ois[d],&bsz);
219:           PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);
220:           PetscViewerASCIISynchronizedPrintf(sviewer,"[%d:%d] (subcomm [%d:%d]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);
221:           PetscViewerFlush(sviewer);
222:           PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);
223:           if (view_subdomains) {
224:             PCGASMSubdomainView_Private(pc,d,sviewer);
225:           }
226:           if (!pc->setupcalled) {
227:             PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");
228:           } else {
229:             KSPView(osm->ksp[d],sviewer);
230:           }
231:           PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
232:           PetscViewerFlush(sviewer);
233:           PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);
234:           ++l;
235:         }
236:       }
237:       MPI_Barrier(PetscObjectComm((PetscObject)pc));
238:     }
239:     PetscViewerASCIIPopTab(viewer);
240:   }
241:   PetscFree2(permutation,numbering);
242:   return(0);
243: }





251: static PetscErrorCode PCSetUp_GASM(PC pc)
252: {
253:   PC_GASM        *osm = (PC_GASM*)pc->data;
255:   PetscBool      symset,flg;
256:   PetscInt       i;
257:   PetscMPIInt    rank, size;
258:   MatReuse       scall = MAT_REUSE_MATRIX;
259:   KSP            ksp;
260:   PC             subpc;
261:   const char     *prefix,*pprefix;
262:   Vec            x,y;
263:   PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
264:   const PetscInt *oidxi;    /* Indices of the i-th local outer subdomain.                          */
265:   PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
266:   PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
267:   IS             gois;      /* Disjoint union of the global indices of the outer subdomains.      */
268:   IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
269:   PetscScalar    *gxarray, *gyarray;
270:   PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
271:   DM             *subdomain_dm = NULL;

274:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
275:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
276:   if (!pc->setupcalled) {

278:     if (!osm->type_set) {
279:       MatIsSymmetricKnown(pc->pmat,&symset,&flg);
280:       if (symset && flg) osm->type = PC_GASM_BASIC;
281:     }

283:     /*
284:      If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
285:      The total number of subdomains, osm->N, is not necessarily set; it might be PETSC_DECIDE, and then will have to be calculated from osm->n.
286:      */
287:     if (osm->n == PETSC_DECIDE) {
288:       /* no subdomains given */
289:       /* try pc->dm first, if allowed */
290:       if (osm->dm_subdomains && pc->dm) {
291:         PetscInt  num_subdomains, d;
292:         char      **subdomain_names;
293:         IS        *inner_subdomain_is, *outer_subdomain_is;
294:         DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);
295:         if (num_subdomains) {
296:           PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);
297:         }
298:         for (d = 0; d < num_subdomains; ++d) {
299:           if (subdomain_names)    {PetscFree(subdomain_names[d]);}
300:           if (inner_subdomain_is) {ISDestroy(&inner_subdomain_is[d]);}
301:           if (outer_subdomain_is) {ISDestroy(&outer_subdomain_is[d]);}
302:           if (subdomain_dm)       {DMDestroy(&subdomain_dm[d]);}
303:         }
304:         PetscFree(subdomain_names);
305:         PetscFree(inner_subdomain_is);
306:         PetscFree(outer_subdomain_is);
307:         PetscFree(subdomain_dm);
308:       }
309:       if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
310:         osm->nmax = osm->n = 1;
311:         MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
312:         osm->N    = size;
313:       }
314:     }
315:     if (!osm->iis) {
316:       /*
317:        The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
318:        but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
319:        We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
320:        */
321:       PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);
322:     }
323:     if (osm->N == PETSC_DECIDE) {
324:       struct {PetscInt max,sum;} inwork,outwork;
325:       /* determine global number of subdomains and the max number of local subdomains */
326:       inwork.max = osm->n;
327:       inwork.sum = osm->n;
328:       MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));
329:       osm->nmax  = outwork.max;
330:       osm->N     = outwork.sum;
331:     }

333:     PCGetOptionsPrefix(pc,&prefix);
334:     flg  = PETSC_FALSE;
335:     PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,NULL);
336:     if (flg) { PCGASMPrintSubdomains(pc); }

338:     if (osm->sort_indices) {
339:       for (i=0; i<osm->n; i++) {
340:         ISSort(osm->ois[i]);
341:         ISSort(osm->iis[i]);
342:       }
343:     }
344:     /*
345:      Merge the ISs, create merged vectors and restrictions.
346:      */
347:     /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
348:     on = 0;
349:     for (i=0; i<osm->n; i++) {
350:       ISGetLocalSize(osm->ois[i],&oni);
351:       on  += oni;
352:     }
353:     PetscMalloc(on*sizeof(PetscInt), &oidx);
354:     on   = 0;
355:     for (i=0; i<osm->n; i++) {
356:       ISGetLocalSize(osm->ois[i],&oni);
357:       ISGetIndices(osm->ois[i],&oidxi);
358:       PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);
359:       ISRestoreIndices(osm->ois[i], &oidxi);
360:       on  += oni;
361:     }
362:     ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);
363:     MatGetVecs(pc->pmat,&x,&y);
364:     VecCreateMPI(PetscObjectComm((PetscObject)pc), on, PETSC_DECIDE, &osm->gx);
365:     VecDuplicate(osm->gx,&osm->gy);
366:     VecGetOwnershipRange(osm->gx, &gofirst, NULL);
367:     ISCreateStride(PetscObjectComm((PetscObject)pc),on,gofirst,1, &goid);
368:     VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));
369:     VecDestroy(&x);
370:     ISDestroy(&gois);

372:     /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
373:     {
374:       PetscInt ini;           /* Number of indices in the i-th local inner subdomain. */
375:       PetscInt in;            /* Number of indices in the disjoint union of local inner subdomains. */
376:       PetscInt *iidx;         /* Global indices in the merged local inner subdomain. */
377:       PetscInt *ioidx;        /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
378:       IS       giis;          /* IS for the disjoint union of inner subdomains. */
379:       IS       giois;         /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
380:       /**/
381:       in = 0;
382:       for (i=0; i<osm->n; i++) {
383:         ISGetLocalSize(osm->iis[i],&ini);
384:         in  += ini;
385:       }
386:       PetscMalloc(in*sizeof(PetscInt), &iidx);
387:       PetscMalloc(in*sizeof(PetscInt), &ioidx);
388:       VecGetOwnershipRange(osm->gx,&gofirst, NULL);
389:       in   = 0;
390:       on   = 0;
391:       for (i=0; i<osm->n; i++) {
392:         const PetscInt         *iidxi; /* Global indices of the i-th local inner subdomain. */
393:         ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
394:         PetscInt               *ioidxi; /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
395:         PetscInt               ioni;  /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
396:         PetscInt               k;
397:         ISGetLocalSize(osm->iis[i],&ini);
398:         ISGetLocalSize(osm->ois[i],&oni);
399:         ISGetIndices(osm->iis[i],&iidxi);
400:         PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);
401:         ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);
402:         ioidxi = ioidx+in;
403:         ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);
404:         ISLocalToGlobalMappingDestroy(&ltogi);
405:         ISRestoreIndices(osm->iis[i], &iidxi);
406:         if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
407:         for (k = 0; k < ini; ++k) ioidxi[k] += gofirst+on;
408:         in += ini;
409:         on += oni;
410:       }
411:       ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, iidx,  PETSC_OWN_POINTER, &giis);
412:       ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, ioidx, PETSC_OWN_POINTER, &giois);
413:       VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);
414:       VecDestroy(&y);
415:       ISDestroy(&giis);
416:       ISDestroy(&giois);
417:     }
418:     ISDestroy(&goid);

420:     /* Create the subdomain work vectors. */
421:     PetscMalloc(osm->n*sizeof(Vec),&osm->x);
422:     PetscMalloc(osm->n*sizeof(Vec),&osm->y);
423:     VecGetArray(osm->gx, &gxarray);
424:     VecGetArray(osm->gy, &gyarray);
425:     for (i=0, on=0; i<osm->n; ++i, on += oni) {
426:       PetscInt oNi;
427:       ISGetLocalSize(osm->ois[i],&oni);
428:       ISGetSize(osm->ois[i],&oNi);
429:       VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);
430:       VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);
431:     }
432:     VecRestoreArray(osm->gx, &gxarray);
433:     VecRestoreArray(osm->gy, &gyarray);
434:     /* Create the local solvers */
435:     PetscMalloc(osm->n*sizeof(KSP*),&osm->ksp);
436:     for (i=0; i<osm->n; i++) { /* KSPs are local */
437:       KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);
438:       PetscLogObjectParent(pc,ksp);
439:       PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
440:       KSPSetType(ksp,KSPPREONLY);
441:       KSPGetPC(ksp,&subpc);
442:       PCGetOptionsPrefix(pc,&prefix);
443:       KSPSetOptionsPrefix(ksp,prefix);
444:       KSPAppendOptionsPrefix(ksp,"sub_");
445:       osm->ksp[i] = ksp;
446:     }
447:     scall = MAT_INITIAL_MATRIX;

449:   } else { /*if (!pc->setupcalled)*/
450:     /*
451:        Destroy the blocks from the previous iteration
452:     */
453:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
454:       MatDestroyMatrices(osm->n,&osm->pmat);
455:       scall = MAT_INITIAL_MATRIX;
456:     }
457:   }

459:   /*
460:      Extract out the submatrices.
461:   */
462:   if (size > 1) {
463:     MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);
464:   } else {
465:     MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);
466:   }
467:   if (scall == MAT_INITIAL_MATRIX) {
468:     PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
469:     for (i=0; i<osm->n; i++) {
470:       PetscLogObjectParent(pc,osm->pmat[i]);
471:       PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
472:     }
473:   }

475:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
476:      different boundary conditions for the submatrices than for the global problem) */
477:   PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);

479:   /*
480:      Loop over submatrices putting them into local ksps
481:   */
482:   for (i=0; i<osm->n; i++) {
483:     KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);
484:     if (!pc->setupcalled) {
485:       KSPSetFromOptions(osm->ksp[i]);
486:     }
487:   }
488:   return(0);
489: }
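/*
   A minimal sketch of the work-vector layout built above: each osm->x[i]/osm->y[i] aliases a
   contiguous segment of the merged vectors osm->gx/osm->gy (see lines 429-430).  The toy sizes
   (8 = 5 + 3) and the function name are illustrative only; error checking is omitted and
   petscvec.h is assumed to be included.
*/
static PetscErrorCode ExampleMergedWorkVectors(void)
{
  Vec         g, sub0, sub1;
  PetscScalar *garray;

  VecCreateMPI(PETSC_COMM_SELF, 8, PETSC_DECIDE, &g);        /* merged storage, like osm->gx */
  VecGetArray(g, &garray);
  /* Two "subdomain" vectors sharing pieces of the merged array. */
  VecCreateMPIWithArray(PETSC_COMM_SELF, 1, 5, 5, garray,     &sub0);
  VecCreateMPIWithArray(PETSC_COMM_SELF, 1, 3, 3, garray + 5, &sub1);
  VecRestoreArray(g, &garray);
  /* ... sub0/sub1 would serve as per-subdomain right-hand sides or solutions ... */
  VecDestroy(&sub0);
  VecDestroy(&sub1);
  VecDestroy(&g);
  return 0;
}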

493: static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
494: {
495:   PC_GASM        *osm = (PC_GASM*)pc->data;
497:   PetscInt       i;

500:   for (i=0; i<osm->n; i++) {
501:     KSPSetUp(osm->ksp[i]);
502:   }
503:   return(0);
504: }

508: static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
509: {
510:   PC_GASM        *osm = (PC_GASM*)pc->data;
512:   PetscInt       i;
513:   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

516:   /*
517:      Support for limiting the restriction or interpolation only to the inner
518:      subdomain values (leaving the other values 0).
519:   */
520:   if (!(osm->type & PC_GASM_RESTRICT)) {
521:     /* have to zero the work RHS since scatter may leave some slots empty */
522:     VecZeroEntries(osm->gx);
523:     VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
524:   } else {
525:     VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
526:   }
527:   VecZeroEntries(osm->gy);
528:   if (!(osm->type & PC_GASM_RESTRICT)) {
529:     VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
530:   } else {
531:     VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
532:   }
533:   /* do the subdomain solves */
534:   for (i=0; i<osm->n; ++i) {
535:     KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);
536:   }
537:   VecZeroEntries(y);
538:   if (!(osm->type & PC_GASM_INTERPOLATE)) {
539:     VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
540:     VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);  return(0);
541:   } else {
542:     VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
543:     VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);  return(0);
544:   }
545: }
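/*
   In operator terms, PCApply_GASM() above computes (sketch)
        y = sum_i P_i K_i R_i x,
   where R_i restricts x onto the i-th outer subdomain when the PC_GASM_RESTRICT bit is set
   (otherwise onto the inner subdomain), K_i is the approximate subdomain solve performed by
   osm->ksp[i], and P_i adds the correction back over the outer subdomain when the
   PC_GASM_INTERPOLATE bit is set (otherwise only over the inner subdomain).
*/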

549: static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
550: {
551:   PC_GASM        *osm = (PC_GASM*)pc->data;
553:   PetscInt       i;
554:   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

557:   /*
558:      Support for limiting the restriction or interpolation to only local
559:      subdomain values (leaving the other values 0).

561:      Note: the roles of the scatters are reversed from PCApply_GASM() because we are
562:      applying the transpose of the three-factor product (interpolation, solve, restriction).
563:   */
564:   if (!(osm->type & PC_GASM_INTERPOLATE)) {
565:     /* have to zero the work RHS since scatter may leave some slots empty */
566:     VecZeroEntries(osm->gx);
567:     VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
568:   } else {
569:     VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
570:   }
571:   VecZeroEntries(osm->gy);
572:   if (!(osm->type & PC_GASM_INTERPOLATE)) {
573:     VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);
574:   } else {
575:     VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);
576:   }
577:   /* do the local solves */
578:   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
579:     KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);
580:   }
581:   VecZeroEntries(y);
582:   if (!(osm->type & PC_GASM_RESTRICT)) {
583:     VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
584:     VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);
585:   } else {
586:     VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
587:     VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);
588:   }
589:   return(0);
590: }

594: static PetscErrorCode PCReset_GASM(PC pc)
595: {
596:   PC_GASM        *osm = (PC_GASM*)pc->data;
598:   PetscInt       i;

601:   if (osm->ksp) {
602:     for (i=0; i<osm->n; i++) {
603:       KSPReset(osm->ksp[i]);
604:     }
605:   }
606:   if (osm->pmat) {
607:     if (osm->n > 0) {
608:       MatDestroyMatrices(osm->n,&osm->pmat);
609:     }
610:   }
611:   if (osm->x) {
612:     for (i=0; i<osm->n; i++) {
613:       VecDestroy(&osm->x[i]);
614:       VecDestroy(&osm->y[i]);
615:     }
616:   }
617:   VecDestroy(&osm->gx);
618:   VecDestroy(&osm->gy);

620:   VecScatterDestroy(&osm->gorestriction);
621:   VecScatterDestroy(&osm->girestriction);
622:   PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);
623:   osm->ois = 0;
624:   osm->iis = 0;
625:   return(0);
626: }

630: static PetscErrorCode PCDestroy_GASM(PC pc)
631: {
632:   PC_GASM        *osm = (PC_GASM*)pc->data;
634:   PetscInt       i;

637:   PCReset_GASM(pc);
638:   if (osm->ksp) {
639:     for (i=0; i<osm->n; i++) {
640:       KSPDestroy(&osm->ksp[i]);
641:     }
642:     PetscFree(osm->ksp);
643:   }
644:   PetscFree(osm->x);
645:   PetscFree(osm->y);
646:   PetscFree(pc->data);
647:   return(0);
648: }

652: static PetscErrorCode PCSetFromOptions_GASM(PC pc)
653: {
654:   PC_GASM        *osm = (PC_GASM*)pc->data;
656:   PetscInt       blocks,ovl;
657:   PetscBool      symset,flg;
658:   PCGASMType     gasmtype;

661:   /* set the type to symmetric if matrix is symmetric */
662:   if (!osm->type_set && pc->pmat) {
663:     MatIsSymmetricKnown(pc->pmat,&symset,&flg);
664:     if (symset && flg) osm->type = PC_GASM_BASIC;
665:   }
666:   PetscOptionsHead("Generalized additive Schwarz options");
667:   PetscOptionsBool("-pc_gasm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCGASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
668:   PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);
669:   if (flg) {
670:     osm->create_local = PETSC_TRUE;
671:     PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,NULL);
672:     PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);
673:     osm->dm_subdomains = PETSC_FALSE;
674:   }
675:   PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);
676:   if (flg) {
677:     PCGASMSetOverlap(pc,ovl);
678:     osm->dm_subdomains = PETSC_FALSE;
679:   }
680:   flg  = PETSC_FALSE;
681:   PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);
682:   if (flg) {PCGASMSetType(pc,gasmtype); }
683:   PetscOptionsTail();
684:   return(0);
685: }
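/*
   The options registered above are normally driven from the command line.  A typical
   invocation (sketch; "./app" stands for any program that calls KSPSetFromOptions() or
   PCSetFromOptions()) might be

     ./app -pc_type gasm -pc_gasm_total_subdomains 8 -pc_gasm_overlap 2 \
           -pc_gasm_type restrict -sub_ksp_type preonly -sub_pc_type ilu
*/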

687: /*------------------------------------------------------------------------------------*/

691: static PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
692: {
693:   PC_GASM        *osm = (PC_GASM*)pc->data;
695:   PetscInt       i;

698:   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
699:   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");

701:   if (!pc->setupcalled) {
702:     osm->n   = n;
703:     osm->ois = 0;
704:     osm->iis = 0;
705:     if (ois) {
706:       for (i=0; i<n; i++) {PetscObjectReference((PetscObject)ois[i]);}
707:     }
708:     if (iis) {
709:       for (i=0; i<n; i++) {PetscObjectReference((PetscObject)iis[i]);}
710:     }
711:     PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);
712:     if (ois) {
713:       PetscMalloc(n*sizeof(IS),&osm->ois);
714:       for (i=0; i<n; i++) osm->ois[i] = ois[i];
715:       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
716:       osm->overlap = -1;
717:       if (!iis) {
718:         PetscMalloc(n*sizeof(IS),&osm->iis);
719:         for (i=0; i<n; i++) {
720:           PetscObjectReference((PetscObject)ois[i]);
721:           osm->iis[i] = ois[i];
722:         }
723:       }
724:     }
725:     if (iis) {
726:       PetscMalloc(n*sizeof(IS),&osm->iis);
727:       for (i=0; i<n; i++) osm->iis[i] = iis[i];
728:       if (!ois) {
729:         PetscMalloc(n*sizeof(IS),&osm->ois);
730:         for (i=0; i<n; i++) {
731:           PetscObjectReference((PetscObject)iis[i]);
732:           osm->ois[i] = iis[i];
733:         }
736:         if (osm->overlap > 0) {
737:           /* Extend the "overlapping" regions by a number of steps */
738:           MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);
739:         }
740:       }
741:     }
742:   }
743:   return(0);
744: }

748: static PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
749: {
750:   PC_GASM        *osm = (PC_GASM*)pc->data;
752:   PetscMPIInt    rank,size;
753:   PetscInt       n;
754:   PetscInt       Nmin, Nmax;

757:   if (!create_local) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains.");
758:   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
759:   MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,PetscObjectComm((PetscObject)pc));
760:   MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,PetscObjectComm((PetscObject)pc));
761:   if (Nmin != Nmax) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);

763:   osm->create_local = create_local;
764:   /*
765:      Split the subdomains equally among all processors
766:   */
767:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
768:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
769:   n    = N/size + ((N % size) > rank);
770:   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
771:   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
772:   if (!pc->setupcalled) {
773:     PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);
774:     osm->N    = N;
775:     osm->n    = n;
776:     osm->nmax = N/size + ((N%size) ? 1 : 0);
777:     osm->ois  = 0;
778:     osm->iis  = 0;
779:   }
780:   return(0);
781: }

785: static PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
786: {
787:   PC_GASM *osm = (PC_GASM*)pc->data;

790:   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
791:   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
792:   if (!pc->setupcalled) osm->overlap = ovl;
793:   return(0);
794: }

798: static PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
799: {
800:   PC_GASM *osm = (PC_GASM*)pc->data;

803:   osm->type     = type;
804:   osm->type_set = PETSC_TRUE;
805:   return(0);
806: }

810: static PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
811: {
812:   PC_GASM *osm = (PC_GASM*)pc->data;

815:   osm->sort_indices = doSort;
816:   return(0);
817: }

821: /*
822:    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
823:         In particular, it would upset the global subdomain number calculation.
824: */
825: static PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
826: {
827:   PC_GASM        *osm = (PC_GASM*)pc->data;

831:   if (osm->n < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

833:   if (n) *n = osm->n;
834:   if (first) {
835:     MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
836:     *first -= osm->n;
837:   }
838:   if (ksp) {
839:     /* Assume that local solves are now different; not necessarily
840:        true, though!  This flag is used only for PCView_GASM() */
841:     *ksp                        = osm->ksp;
842:     osm->same_subdomain_solvers = PETSC_FALSE;
843:   }
844:   return(0);
845: } /* PCGASMGetSubKSP_GASM() */

849: /*@C
850:     PCGASMSetSubdomains - Sets the subdomains for this processor
851:     for the additive Schwarz preconditioner.

853:     Collective on PC

855:     Input Parameters:
856: +   pc  - the preconditioner context
857: .   n   - the number of subdomains for this processor
858: .   iis - the index sets that define this processor's local inner subdomains
859:          (or NULL for PETSc to determine subdomains)
860: -   ois - the index sets that define this processor's local outer subdomains
861:          (or NULL to use the same as iis)

863:     Notes:
864:     The IS indices use the parallel, global numbering of the vector entries.
865:     Inner subdomains are those where the correction is applied.
866:     Outer subdomains are those where the residual necessary to obtain the
867:     corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
868:     Both inner and outer subdomains can extend over several processors.
869:     This processor's portion of a subdomain is known as a local subdomain.

871:     By default the GASM preconditioner uses 1 (local) subdomain per processor.
872:     Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
873:     all processors that PCGASM will create automatically, and to specify whether
874:     they should be local or not.


877:     Level: advanced

879: .keywords: PC, GASM, set, subdomains, additive Schwarz

881: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
882:           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
883: @*/
884: PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
885: {

890:   PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));
891:   return(0);
892: }
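/*
   A minimal usage sketch for PCGASMSetSubdomains(): one inner subdomain per process, consisting
   of the locally owned rows of the operator, with the outer subdomains left for PCGASM to build
   via the overlap.  The function name is illustrative, the PC and Mat are assumed to exist
   already, and error checking is omitted.
*/
static PetscErrorCode ExampleSetSubdomains(PC pc, Mat A)
{
  IS       iis;
  PetscInt rstart, rend;

  MatGetOwnershipRange(A, &rstart, &rend);
  ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &iis);  /* global row numbers */
  PCGASMSetSubdomains(pc, 1, &iis, NULL);   /* NULL: outer subdomains grown from iis */
  ISDestroy(&iis);                          /* the PC keeps its own reference */
  return 0;
}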

896: /*@C
897:     PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
898:     Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
899:     communicator. Either all or no processors in the PC communicator must call this routine with
900:     the same N.  The subdomains will be created automatically during PCSetUp().

902:     Collective on PC

904:     Input Parameters:
905: +   pc           - the preconditioner context
906: .   N            - the total number of subdomains cumulative across all processors
907: -   create_local - whether the subdomains to be created are to be local

909:     Options Database Key:
910:     To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
911: +    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
912: -    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

914:     By default the GASM preconditioner uses 1 subdomain per processor.


917:     Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
918:     of subdomains per processor.

920:     Level: advanced

922: .keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

924: .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
925:           PCGASMCreateSubdomains2D()
926: @*/
927: PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
928: {

933:   PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));
934:   return(0);
935: }
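/*
   A minimal sketch of PCGASMSetTotalSubdomains(): ask PCGASM to autocreate 8 local subdomains,
   split across the communicator.  The count is arbitrary (it must be the same on every process,
   as checked above); error checking is omitted.
*/
static PetscErrorCode ExampleSetTotalSubdomains(PC pc)
{
  PCSetType(pc, PCGASM);
  PCGASMSetTotalSubdomains(pc, 8, PETSC_TRUE);   /* like -pc_gasm_total_subdomains 8 */
  return 0;
}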

939: /*@
940:     PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
941:     additive Schwarz preconditioner.  Either all or no processors in the
942:     PC communicator must call this routine.

944:     Logically Collective on PC

946:     Input Parameters:
947: +   pc  - the preconditioner context
948: -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

950:     Options Database Key:
951: .   -pc_gasm_overlap <overlap> - Sets overlap

953:     Notes:
954:     By default the GASM preconditioner uses 1 subdomain per processor.  To use
955:     multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
956:     PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

958:     The overlap defaults to 1, so if one desires that no additional
959:     overlap be computed beyond what may have been set with a call to
960:     PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
961:     must be set to 0.  In particular, if one does not explicitly set
962:     the subdomains in application code, then all overlap would be computed
963:     internally by PETSc, and using an overlap of 0 would result in a GASM
964:     variant that is equivalent to the block Jacobi preconditioner.

966:     Note that one can define initial index sets with any overlap via
967:     PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
968:     PETSc to extend that overlap further, if desired.

970:     Level: intermediate

972: .keywords: PC, GASM, set, overlap

974: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
975:           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
976: @*/
977: PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
978: {

984:   PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
985:   return(0);
986: }
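/*
   A minimal sketch of PCGASMSetOverlap(): request two layers of overlap before PCSetUp().
   With autocreated subdomains, an overlap of 0 would instead reduce GASM to block Jacobi,
   as noted above.  Error checking is omitted.
*/
static PetscErrorCode ExampleSetOverlap(PC pc)
{
  PCSetType(pc, PCGASM);
  PCGASMSetOverlap(pc, 2);   /* equivalent to -pc_gasm_overlap 2 */
  return 0;
}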

990: /*@
991:     PCGASMSetType - Sets the type of restriction and interpolation used
992:     for local problems in the additive Schwarz method.

994:     Logically Collective on PC

996:     Input Parameters:
997: +   pc  - the preconditioner context
998: -   type - variant of GASM, one of
999: .vb
1000:       PC_GASM_BASIC       - full interpolation and restriction
1001:       PC_GASM_RESTRICT    - full restriction, local processor interpolation
1002:       PC_GASM_INTERPOLATE - full interpolation, local processor restriction
1003:       PC_GASM_NONE        - local processor restriction and interpolation
1004: .ve

1006:     Options Database Key:
1007: .   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

1009:     Level: intermediate

1011: .keywords: PC, GASM, set, type

1013: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1014:           PCGASMCreateSubdomains2D()
1015: @*/
1016: PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
1017: {

1023:   PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));
1024:   return(0);
1025: }

1029: /*@
1030:     PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

1032:     Logically Collective on PC

1034:     Input Parameters:
1035: +   pc  - the preconditioner context
1036: -   doSort - sort the subdomain indices

1038:     Level: intermediate

1040: .keywords: PC, GASM, set, type

1042: .seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
1043:           PCGASMCreateSubdomains2D()
1044: @*/
1045: PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool doSort)
1046: {

1052:   PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1053:   return(0);
1054: }

1058: /*@C
1059:    PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
1060:    this processor.

1062:    Collective on PC iff first_local is requested

1064:    Input Parameter:
1065: .  pc - the preconditioner context

1067:    Output Parameters:
1068: +  n_local - the number of blocks on this processor or NULL
1069: .  first_local - the global number of the first block on this processor or NULL,
1070:                  all processors must request or all must pass NULL
1071: -  ksp - the array of KSP contexts

1073:    Note:
1074:    After PCGASMGetSubKSP() the array of KSP contexts is not to be freed

1076:    Currently for some matrix implementations only 1 block per processor
1077:    is supported.

1079:    You must call KSPSetUp() before calling PCGASMGetSubKSP().

1081:    Level: advanced

1083: .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

1085: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
1086:           PCGASMCreateSubdomains2D(),
1087: @*/
1088: PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1089: {

1094:   PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1095:   return(0);
1096: }
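/*
   A sketch of configuring the subdomain solvers through PCGASMGetSubKSP() instead of the
   -sub_ options prefix.  It must run after KSPSetUp()/PCSetUp(); the function name is
   illustrative and error checking is omitted.
*/
static PetscErrorCode ExampleTuneSubKSP(PC pc)
{
  KSP      *subksp;
  PC       subpc;
  PetscInt i, nlocal, first;

  PCGASMGetSubKSP(pc, &nlocal, &first, &subksp);
  for (i = 0; i < nlocal; i++) {
    KSPSetType(subksp[i], KSPPREONLY);
    KSPGetPC(subksp[i], &subpc);
    PCSetType(subpc, PCILU);        /* e.g. ILU on every local block */
  }
  /* subksp is owned by the PC; do not free it (see the Note above). */
  return 0;
}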

1098: /* -------------------------------------------------------------------------------------*/
1099: /*MC
1100:    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1101:            its own KSP object.

1103:    Options Database Keys:
1104: +  -pc_gasm_total_subdomains <n>  - Sets the total number of subdomains (blocks) to be distributed among the processors
1105: .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1106: .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1107: .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1108: -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

1110:      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1111:       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1112:       -pc_gasm_type basic to use the standard GASM.

1114:    Notes: Each processor can have one or more blocks, and, unlike with PCASM, a block (subdomain)
1115:      may extend over several processors. Defaults to one block per processor.

1117:      To set options on the solvers for each block append -sub_ to all the KSP and PC
1118:         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

1120:      To set the options on the solvers separately for each block call PCGASMGetSubKSP()
1121:          and set the options directly on the resulting KSP object (you can access its PC
1122:          with KSPGetPC())


1125:    Level: beginner

1127:    Concepts: additive Schwarz method

1129:     References:
1130:     An additive variant of the Schwarz alternating method for the case of many subregions
1131:     M Dryja, OB Widlund - Courant Institute, New York University Technical report

1133:     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1134:     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

1136: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1137:            PCBJACOBI,  PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1138:            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()

1140: M*/
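/*
   A minimal sketch of using PCGASM inside a KSP solve.  The matrix A and the vectors b, x are
   assumed to be created and assembled elsewhere, petscksp.h is assumed to be included, and
   error checking is omitted.
*/
static PetscErrorCode ExampleSolveWithGASM(Mat A, Vec b, Vec x)
{
  KSP ksp;
  PC  pc;

  KSPCreate(PetscObjectComm((PetscObject)A), &ksp);
  KSPSetOperators(ksp, A, A, DIFFERENT_NONZERO_PATTERN);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCGASM);
  PCGASMSetOverlap(pc, 1);   /* or -pc_gasm_overlap 1 */
  KSPSetFromOptions(ksp);    /* picks up the -pc_gasm_* and -sub_* options documented above */
  KSPSolve(ksp, b, x);
  KSPDestroy(&ksp);
  return 0;
}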

1144: PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc)
1145: {
1147:   PC_GASM        *osm;

1150:   PetscNewLog(pc,PC_GASM,&osm);

1152:   osm->N                      = PETSC_DECIDE;
1153:   osm->n                      = PETSC_DECIDE;
1154:   osm->nmax                   = 0;
1155:   osm->overlap                = 1;
1156:   osm->ksp                    = 0;
1157:   osm->gorestriction          = 0;
1158:   osm->girestriction          = 0;
1159:   osm->gx                     = 0;
1160:   osm->gy                     = 0;
1161:   osm->x                      = 0;
1162:   osm->y                      = 0;
1163:   osm->ois                    = 0;
1164:   osm->iis                    = 0;
1165:   osm->pmat                   = 0;
1166:   osm->type                   = PC_GASM_RESTRICT;
1167:   osm->same_subdomain_solvers = PETSC_TRUE;
1168:   osm->sort_indices           = PETSC_TRUE;
1169:   osm->dm_subdomains          = PETSC_FALSE;

1171:   pc->data                 = (void*)osm;
1172:   pc->ops->apply           = PCApply_GASM;
1173:   pc->ops->applytranspose  = PCApplyTranspose_GASM;
1174:   pc->ops->setup           = PCSetUp_GASM;
1175:   pc->ops->reset           = PCReset_GASM;
1176:   pc->ops->destroy         = PCDestroy_GASM;
1177:   pc->ops->setfromoptions  = PCSetFromOptions_GASM;
1178:   pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
1179:   pc->ops->view            = PCView_GASM;
1180:   pc->ops->applyrichardson = 0;

1182:   PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSubdomains_C",PCGASMSetSubdomains_GASM);
1183:   PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetTotalSubdomains_C",PCGASMSetTotalSubdomains_GASM);
1184:   PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetOverlap_C",PCGASMSetOverlap_GASM);
1185:   PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetType_C",PCGASMSetType_GASM);
1186:   PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSortIndices_C",PCGASMSetSortIndices_GASM);
1187:   PetscObjectComposeFunction((PetscObject)pc,"PCGASMGetSubKSP_C",PCGASMGetSubKSP_GASM);
1188:   return(0);
1189: }


1194: /*@C
1195:    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1196:    Schwarz preconditioner for any problem based on its matrix.

1198:    Collective

1200:    Input Parameters:
1201: +  A       - The global matrix operator
1202: .  overlap - amount of overlap in outer subdomains
1203: -  n       - the number of local subdomains

1205:    Output Parameters:
1206: +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1207: -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)

1209:    Level: advanced

1211:    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1212:          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1213:          nonzero overlap with PCGASMSetOverlap()

1215:     In the Fortran version you must provide the output arrays iis[] and (if requested) ois[] already allocated of length n.

1217: .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

1219: .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1220: @*/
1221: PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS *iis[], IS *ois[])
1222: {
1223:   MatPartitioning mpart;
1224:   const char      *prefix;
1225:   PetscErrorCode  (*f)(Mat,MatReuse,Mat*);
1226:   PetscMPIInt     size;
1227:   PetscInt        i,j,rstart,rend,bs;
1228:   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1229:   Mat             Ad     = NULL, adj;
1230:   IS              ispart,isnumb,*is;
1231:   PetscErrorCode  ierr;

1236:   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

1238:   /* Get prefix, row distribution, and block size */
1239:   MatGetOptionsPrefix(A,&prefix);
1240:   MatGetOwnershipRange(A,&rstart,&rend);
1241:   MatGetBlockSize(A,&bs);
1242:   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

1244:   /* Get diagonal block from matrix if possible */
1245:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1246:   PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);
1247:   if (f) {
1248:     MatGetDiagonalBlock(A,&Ad);
1249:   } else if (size == 1) {
1250:     Ad = A;
1251:   }
1252:   if (Ad) {
1253:     PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1254:     if (!isbaij) {PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1255:   }
1256:   if (Ad && n > 1) {
1257:     PetscBool  match,done;
1258:     /* Try to setup a good matrix partitioning if available */
1259:     MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1260:     PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1261:     MatPartitioningSetFromOptions(mpart);
1262:     PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1263:     if (!match) {
1264:       PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1265:     }
1266:     if (!match) { /* assume a "good" partitioner is available */
1267:       PetscInt       na;
1268:       const PetscInt *ia,*ja;
1269:       MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1270:       if (done) {
1271:         /* Build adjacency matrix by hand. Unfortunately a call to
1272:            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1273:            remove the block-aij structure and we cannot expect
1274:            MatPartitioning to split vertices as we need */
1275:         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1276:         const PetscInt *row;
1277:         nnz = 0;
1278:         for (i=0; i<na; i++) { /* count number of nonzeros */
1279:           len = ia[i+1] - ia[i];
1280:           row = ja + ia[i];
1281:           for (j=0; j<len; j++) {
1282:             if (row[j] == i) { /* don't count diagonal */
1283:               len--; break;
1284:             }
1285:           }
1286:           nnz += len;
1287:         }
1288:         PetscMalloc((na+1)*sizeof(PetscInt),&iia);
1289:         PetscMalloc((nnz)*sizeof(PetscInt),&jja);
1290:         nnz    = 0;
1291:         iia[0] = 0;
1292:         for (i=0; i<na; i++) { /* fill adjacency */
1293:           cnt = 0;
1294:           len = ia[i+1] - ia[i];
1295:           row = ja + ia[i];
1296:           for (j=0; j<len; j++) {
1297:             if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */
1298:           }
1299:           nnz += cnt;
1300:           iia[i+1] = nnz;
1301:         }
1302:         /* Partitioning of the adjacency matrix */
1303:         MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1304:         MatPartitioningSetAdjacency(mpart,adj);
1305:         MatPartitioningSetNParts(mpart,n);
1306:         MatPartitioningApply(mpart,&ispart);
1307:         ISPartitioningToNumbering(ispart,&isnumb);
1308:         MatDestroy(&adj);
1309:         foundpart = PETSC_TRUE;
1310:       }
1311:       MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1312:     }
1313:     MatPartitioningDestroy(&mpart);
1314:   }
1315:   PetscMalloc(n*sizeof(IS),&is);
1316:   if (!foundpart) {

1318:     /* Partitioning by contiguous chunks of rows */

1320:     PetscInt mbs   = (rend-rstart)/bs;
1321:     PetscInt start = rstart;
1322:     for (i=0; i<n; i++) {
1323:       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1324:       ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1325:       start += count;
1326:     }

1328:   } else {

1330:     /* Partitioning by adjacency of diagonal block  */

1332:     const PetscInt *numbering;
1333:     PetscInt       *count,nidx,*indices,*newidx,start=0;
1334:     /* Get node count in each partition */
1335:     PetscMalloc(n*sizeof(PetscInt),&count);
1336:     ISPartitioningCount(ispart,n,count);
1337:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1338:       for (i=0; i<n; i++) count[i] *= bs;
1339:     }
1340:     /* Build indices from node numbering */
1341:     ISGetLocalSize(isnumb,&nidx);
1342:     PetscMalloc(nidx*sizeof(PetscInt),&indices);
1343:     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1344:     ISGetIndices(isnumb,&numbering);
1345:     PetscSortIntWithPermutation(nidx,numbering,indices);
1346:     ISRestoreIndices(isnumb,&numbering);
1347:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1348:       PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);
1349:       for (i=0; i<nidx; i++) {
1350:         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1351:       }
1352:       PetscFree(indices);
1353:       nidx   *= bs;
1354:       indices = newidx;
1355:     }
1356:     /* Shift to get global indices */
1357:     for (i=0; i<nidx; i++) indices[i] += rstart;

1359:     /* Build the index sets for each block */
1360:     for (i=0; i<n; i++) {
1361:       ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1362:       ISSort(is[i]);
1363:       start += count[i];
1364:     }

1366:     PetscFree(count);
1367:     PetscFree(indices);
1368:     ISDestroy(&isnumb);
1369:     ISDestroy(&ispart);
1370:   }
1371:   *iis = is;
1372:   if (!ois) return(0);
1373:   /*
1374:    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1375:    has been requested, copy the inner subdomains over so they can be modified.
1376:    */
1377:   PetscMalloc(n*sizeof(IS),ois);
1378:   for (i=0; i<n; ++i) {
1379:     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1380:       ISDuplicate((*iis)[i],(*ois)+i);
1381:       ISCopy((*iis)[i],(*ois)[i]);
1382:     } else {
1383:       PetscObjectReference((PetscObject)(*iis)[i]);
1384:       (*ois)[i] = (*iis)[i];
1385:     }
1386:   }
1387:   if (overlap > 0) {
1388:     /* Extend the "overlapping" regions by a number of steps */
1389:     MatIncreaseOverlap(A,n,*ois,overlap);
1390:   }
1391:   return(0);
1392: }

1396: /*@C
1397:    PCGASMDestroySubdomains - Destroys the index sets created with
1398:    PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1399:    called after setting subdomains with PCGASMSetSubdomains().

1401:    Collective

1403:    Input Parameters:
1404: +  n   - the number of index sets
1405: .  iis - the array of inner subdomains
1406: -  ois - the array of outer subdomains, can be NULL

1408:    Level: intermediate

1410:    Notes: this is merely a convenience subroutine that walks each list,
1411:    destroys each IS on the list, and then frees the list.

1413: .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

1415: .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1416: @*/
1417: PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1418: {
1419:   PetscInt       i;

1423:   if (n <= 0) return(0);
1424:   if (iis) {
1426:     for (i=0; i<n; i++) {
1427:       ISDestroy(&iis[i]);
1428:     }
1429:     PetscFree(iis);
1430:   }
1431:   if (ois) {
1432:     for (i=0; i<n; i++) {
1433:       ISDestroy(&ois[i]);
1434:     }
1435:     PetscFree(ois);
1436:   }
1437:   return(0);
1438: }
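/*
   Illustrative sketch (not part of the original source, error checking omitted to match the
   stripped listing): build inner/outer subdomain lists by hand, hand them to the PC and then
   release them with PCGASMDestroySubdomains().  The PCGASMSetSubdomains(pc,n,iis,ois) call
   and the stride sizes are assumptions made for the sake of the example.
*/
static PetscErrorCode ExampleGASMSetAndDestroySubdomains(PC pc)
{
  PetscInt n = 2, i;
  IS       *iis, *ois;

  /* The arrays must come from PetscMalloc(), since PCGASMDestroySubdomains() calls PetscFree() on them. */
  PetscMalloc(n*sizeof(IS), &iis);
  PetscMalloc(n*sizeof(IS), &ois);
  for (i = 0; i < n; i++) {
    /* Two disjoint strides of 8 indices each stand in for the inner subdomains. */
    ISCreateStride(PETSC_COMM_SELF, 8, 8*i, 1, &iis[i]);
    /* Outer subdomains equal to the inner ones here (zero overlap). */
    PetscObjectReference((PetscObject)iis[i]);
    ois[i] = iis[i];
  }
  PCGASMSetSubdomains(pc, n, iis, ois);   /* assumed signature: (PC,PetscInt,IS[],IS[]) */
  /* Per the man page above, the lists may be destroyed once the subdomains have been set. */
  PCGASMDestroySubdomains(n, iis, ois);
  return(0);
}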


1441: #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
1442:   {                                                                                                       \
1443:     PetscInt first_row = first/M, last_row = last/M+1;                                                     \
1444:     /*                                                                                                    \
1445:      Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
1446:      of the bounding box of the intersection of the subdomain with the local ownership range (local       \
1447:      subdomain).                                                                                          \
1448:      Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
1449:      of the intersection.                                                                                 \
1450:     */                                                                                               \
1451:     /* ylow_loc is the grid row containing the first element of the local subdomain */               \
1452:     *ylow_loc = PetscMax(first_row,ylow);                                                             \
1453:     /* xleft_loc is the offset of the first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1454:     *xleft_loc = *ylow_loc==first_row ? PetscMax(first%M,xleft) : xleft;                              \
1455:     /* yhigh_loc is the grid row above the last local subdomain element */                            \
1456:     *yhigh_loc = PetscMin(last_row,yhigh);                                                            \
1457:     /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1458:     *xright_loc = *yhigh_loc==last_row ? PetscMin(xright,last%M) : xright;                            \
1459:     /* Now compute the size of the local subdomain n. */ \
1460:     *n = 0;                                               \
1461:     if (*ylow_loc < *yhigh_loc) {                           \
1462:       PetscInt width = xright-xleft;                     \
1463:       *n += width*(*yhigh_loc-*ylow_loc-1);                 \
1464:       *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
1465:       *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
1466:     } \
1467:   }
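/*
   A worked example of the bookkeeping above (illustrative, not from the original source):
   take M = 10, a local ownership range [first,last) = [23,57) and a subdomain box
   xleft = 2, xright = 8, ylow = 0, yhigh = 6 (dof = 1).  Then first_row = 2 and last_row = 6,
   so ylow_loc = 2, xleft_loc = max(23%10,2) = 3, yhigh_loc = 6, xright_loc = min(8,57%10) = 7,
   and the local size is n = 6*(6-2-1) + min(7-2,6) - min(3-2,6) = 18 + 5 - 1 = 22, which
   matches a direct count of the owned grid points inside the box.
*/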



1473: /*@
1474:    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1475:    preconditioner for a two-dimensional problem on a regular grid.

1477:    Collective

1479:    Input Parameters:
1480: +  M, N               - the global number of grid points in the x and y directions
1481: .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1482: .  dof                - degrees of freedom per node
1483: -  overlap            - overlap in mesh lines

1485:    Output Parameters:
1486: +  nsub - the number of local subdomains created
1487: .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1488: -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains


1491:    Level: advanced

1493: .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid

1495: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1496:           PCGASMSetOverlap()
1497: @*/
1498: PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1499: {
1501:   PetscMPIInt    size, rank;
1502:   PetscInt       i, j;
1503:   PetscInt       maxheight, maxwidth;
1504:   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1505:   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1506:   PetscInt       x[2][2], y[2][2], n[2];
1507:   PetscInt       first, last;
1508:   PetscInt       nidx, *idx;
1509:   PetscInt       ii,jj,s,q,d;
1510:   PetscInt       k,kk;
1511:   PetscMPIInt    color;
1512:   MPI_Comm       comm, subcomm;
1513:   IS             **xis = 0, **is = ois, **is_local = iis;

1516:   PetscObjectGetComm((PetscObject)pc, &comm);
1517:   MPI_Comm_size(comm, &size);
1518:   MPI_Comm_rank(comm, &rank);
1519:   MatGetOwnershipRange(pc->pmat, &first, &last);
1520:   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1521:                                       "does not respect the number of degrees of freedom per grid point %D", first, last, dof);

1523:   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1524:   s      = 0;
1525:   ystart = 0;
1526:   for (j=0; j<Ndomains; ++j) {
1527:     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1528:     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1528:     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1529:     /* Vertical domain limits with an overlap. */
1530:     ylow   = PetscMax(ystart - overlap,0);
1531:     yhigh  = PetscMin(ystart + maxheight + overlap,N);
1532:     xstart = 0;
1533:     for (i=0; i<Mdomains; ++i) {
1534:       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1535:       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1536:       /* Horizontal domain limits with an overlap. */
1537:       xleft  = PetscMax(xstart - overlap,0);
1538:       xright = PetscMin(xstart + maxwidth + overlap,M);
1539:       /*
1540:          Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1541:       */
1542:       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1543:       if (nidx) ++s;
1544:       xstart += maxwidth;
1545:     } /* for (i = 0; i < Mdomains; ++i) */
1546:     ystart += maxheight;
1547:   } /* for (j = 0; j < Ndomains; ++j) */

1549:   /* Now we can allocate the necessary number of ISs. */
1550:   *nsub  = s;
1551:   PetscMalloc((*nsub)*sizeof(IS),is);
1552:   PetscMalloc((*nsub)*sizeof(IS),is_local);
1553:   s      = 0;
1554:   ystart = 0;
1555:   for (j=0; j<Ndomains; ++j) {
1556:     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1557:     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1558:     /* Vertical domain limits with an overlap. */
1559:     y[0][0] = PetscMax(ystart - overlap,0);
1560:     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1561:     /* Vertical domain limits without an overlap. */
1562:     y[1][0] = ystart;
1563:     y[1][1] = PetscMin(ystart + maxheight,N);
1564:     xstart  = 0;
1565:     for (i=0; i<Mdomains; ++i) {
1566:       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1567:       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1568:       /* Horizontal domain limits with an overlap. */
1569:       x[0][0] = PetscMax(xstart - overlap,0);
1570:       x[0][1] = PetscMin(xstart + maxwidth + overlap,M);
1571:       /* Horizontal domain limits without an overlap. */
1572:       x[1][0] = xstart;
1573:       x[1][1] = PetscMin(xstart+maxwidth,M);
1574:       /*
1575:          Determine whether this domain intersects this processor's ownership range of pc->pmat.
1576:          Do this twice: first for the domains with an overlap, and then for the domains without.
1577:          During the first pass create the subcommunicators, and use them on the second pass as well.
1578:       */
1579:       for (q = 0; q < 2; ++q) {
1580:         /*
1581:           domain limits, (xleft, xright) and (ylow, yhigh), are adjusted
1582:           according to whether the domain with or without an overlap is considered.
1583:         */
1584:         xleft = x[q][0]; xright = x[q][1];
1585:         ylow  = y[q][0]; yhigh  = y[q][1];
1586:         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1587:         nidx *= dof;
1588:         n[q]  = nidx;
1589:         /*
1590:          Based on the counted number of indices in the local domain *with an overlap*,
1591:          construct a subcommunicator of all the processors supporting this domain.
1592:          Observe that a domain with an overlap might have nontrivial local support,
1593:          while the domain without an overlap might not.  Hence, the decision to participate
1594:          in the subcommunicator must be based on the domain with an overlap.
1595:          */
1596:         if (q == 0) {
1597:           if (nidx) color = 1;
1598:           else color = MPI_UNDEFINED;

1600:           MPI_Comm_split(comm, color, rank, &subcomm);
1601:         }
1602:         /*
1603:          Proceed only if the number of local indices *with an overlap* is nonzero.
1604:          */
1605:         if (n[0]) {
1606:           if (q == 0) xis = is;
1607:           if (q == 1) {
1608:             /*
1609:              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1610:              Moreover, if the overlap is zero, the two ISs are identical.
1611:              */
1612:             if (overlap == 0) {
1613:               (*is_local)[s] = (*is)[s];
1614:               PetscObjectReference((PetscObject)(*is)[s]);
1615:               continue;
1616:             } else {
1617:               xis     = is_local;
1618:               subcomm = ((PetscObject)(*is)[s])->comm;
1619:             }
1620:           } /* if (q == 1) */
1621:           idx  = NULL;
1622:           PetscMalloc(nidx*sizeof(PetscInt),&idx);
1623:           if (nidx) {
1624:             k = 0;
1625:             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1626:               PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft;
1627:               PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright;
1628:               kk = dof*(M*jj + x0);
1629:               for (ii=x0; ii<x1; ++ii) {
1630:                 for (d = 0; d < dof; ++d) {
1631:                   idx[k++] = kk++;
1632:                 }
1633:               }
1634:             }
1635:           }
1636:           ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);
1637:         }/* if (n[0]) */
1638:       }/* for (q = 0; q < 2; ++q) */
1639:       if (n[0]) ++s;
1640:       xstart += maxwidth;
1641:     } /* for (i = 0; i < Mdomains; ++i) */
1642:     ystart += maxheight;
1643:   } /* for (j = 0; j < Ndomains; ++j) */
1644:   return(0);
1645: }
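/*
   Illustrative sketch (not part of the original source, error checking omitted): typical use
   of PCGASMCreateSubdomains2D() on a structured grid.  The grid and subdomain sizes are
   made-up example values, and the PCGASMSetSubdomains(pc,nsub,iis,ois) call is an assumption
   about how the created index sets are handed back to the PC.
*/
static PetscErrorCode ExampleGASMSubdomains2D(PC pc)
{
  PetscInt M = 64, N = 64;             /* global grid points in x and y */
  PetscInt Mdomains = 4, Ndomains = 4; /* subdomains in x and y */
  PetscInt dof = 1, overlap = 1;
  PetscInt nsub;
  IS       *iis, *ois;

  PCGASMCreateSubdomains2D(pc, M, N, Mdomains, Ndomains, dof, overlap, &nsub, &iis, &ois);
  PCGASMSetSubdomains(pc, nsub, iis, ois);
  /* Per the PCGASMDestroySubdomains() man page, the lists may be destroyed once they are set on the PC. */
  PCGASMDestroySubdomains(nsub, iis, ois);
  return(0);
}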

1649: /*@C
1650:     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1651:     for the additive Schwarz preconditioner.

1653:     Not Collective

1655:     Input Parameter:
1656: .   pc - the preconditioner context

1658:     Output Parameters:
1659: +   n   - the number of subdomains for this processor (default value = 1)
1660: .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be NULL)
1661: -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be NULL)


1664:     Notes:
1665:     The user is responsible for destroying the ISs and freeing the returned arrays.
1666:     The IS numbering is in the parallel, global numbering of the vector.

1668:     Level: advanced

1670: .keywords: PC, GASM, get, subdomains, additive Schwarz

1672: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1673:           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1674: @*/
1675: PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1676: {
1677:   PC_GASM        *osm;
1679:   PetscBool      match;
1680:   PetscInt       i;

1684:   PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1685:   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1686:   osm = (PC_GASM*)pc->data;
1687:   if (n) *n = osm->n;
1688:   if (iis) {
1689:     PetscMalloc(osm->n*sizeof(IS), iis);
1690:   }
1691:   if (ois) {
1692:     PetscMalloc(osm->n*sizeof(IS), ois);
1693:   }
1694:   if (iis || ois) {
1695:     for (i = 0; i < osm->n; ++i) {
1696:       if (iis) (*iis)[i] = osm->iis[i];
1697:       if (ois) (*ois)[i] = osm->ois[i];
1698:     }
1699:   }
1700:   return(0);
1701: }
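/*
   Illustrative sketch (not part of the original source, error checking omitted): querying the
   subdomains after the preconditioner has been set up, then freeing the returned arrays.
*/
static PetscErrorCode ExampleGASMInspectSubdomains(PC pc)
{
  PetscInt n, i, nidx;
  IS       *iis, *ois;

  PCGASMGetSubdomains(pc, &n, &iis, &ois);
  for (i = 0; i < n; i++) {
    ISGetLocalSize(iis[i], &nidx);
    PetscPrintf(PETSC_COMM_SELF, "inner subdomain %D has %D local indices\n", i, nidx);
  }
  /* Only the arrays allocated by PCGASMGetSubdomains() are freed here; see the Notes above for ownership of the index sets. */
  PetscFree(iis);
  PetscFree(ois);
  return(0);
}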

1705: /*@C
1706:     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1707:     only) for the additive Schwarz preconditioner.

1709:     Not Collective

1711:     Input Parameter:
1712: .   pc - the preconditioner context

1714:     Output Parameters:
1715: +   n   - the number of matrices for this processor (default value = 1)
1716: -   mat - the matrices

1718:     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1719:            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1720:            subdomains were defined using PCGASMSetTotalSubdomains().
1721:     Level: advanced

1723: .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi

1725: .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1726:           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1727: @*/
1728: PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1729: {
1730:   PC_GASM        *osm;
1732:   PetscBool      match;

1738:   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1739:   PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1740:   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1741:   osm = (PC_GASM*)pc->data;
1742:   if (n) *n = osm->n;
1743:   if (mat) *mat = osm->pmat;
1744:   return(0);
1745: }
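/*
   Illustrative sketch (not part of the original source, error checking omitted): examining the
   subdomain matrices after KSPSetUp()/PCSetUp() has been called on a PCGASM preconditioner.
*/
static PetscErrorCode ExampleGASMInspectSubmatrices(PC pc)
{
  PetscInt n, i, m, nc;
  Mat      *mat;

  PCGASMGetSubmatrices(pc, &n, &mat);
  for (i = 0; i < n; i++) {
    MatGetSize(mat[i], &m, &nc);
    PetscPrintf(PETSC_COMM_SELF, "subdomain matrix %D is %D x %D\n", i, m, nc);
  }
  /* The array returned here points at the PC's internal storage, so nothing is freed. */
  return(0);
}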

1749: /*@
1750:     PCGASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1751:     Logically Collective

1753:     Input Parameters:
1754: +   pc  - the preconditioner
1755: -   flg - boolean indicating whether to use subdomains defined by the DM

1757:     Options Database Key:
1758: .   -pc_gasm_dm_subdomains <bool> - use subdomains defined by the DM, whenever possible

1760:     Level: intermediate

1762:     Notes:
1763:     PCGASMSetTotalSubdomains() and PCGASMSetOverlap() take precedence over PCGASMSetDMSubdomains(),
1764:     so setting either of the first two effectively turns the latter off.

1766: .keywords: PC, GASM, DM, set, subdomains, additive Schwarz

1768: .seealso: PCGASMGetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1769:           PCGASMCreateSubdomains2D()
1770: @*/
1771: PetscErrorCode  PCGASMSetDMSubdomains(PC pc,PetscBool flg)
1772: {
1773:   PC_GASM        *osm = (PC_GASM*)pc->data;
1775:   PetscBool      match;

1780:   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot call PCGASMSetDMSubdomains() after PCSetUp() has been called");
1781:   PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1782:   if (match) {
1783:     osm->dm_subdomains = flg;
1784:   }
1785:   return(0);
1786: }
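/*
   Illustrative sketch (not part of the original source, error checking omitted): enabling
   DM-defined subdomains programmatically; the -pc_gasm_dm_subdomains option has the same effect.
*/
static PetscErrorCode ExampleGASMUseDMSubdomains(PC pc)
{
  PCSetType(pc, PCGASM);
  PCGASMSetDMSubdomains(pc, PETSC_TRUE);
  /* Must be called before PCSetUp(); afterwards the routine above raises an error. */
  return(0);
}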

1790: /*@
1791:     PCGASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1792:     Not Collective

1794:     Input Parameter:
1795: .   pc  - the preconditioner

1797:     Output Parameter:
1798: .   flg - boolean indicating whether to use subdomains defined by the DM

1800:     Level: intermediate

1802: .keywords: PC, GASM, DM, get, subdomains, additive Schwarz

1804: .seealso: PCGASMSetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1805:           PCGASMCreateSubdomains2D()
1806: @*/
1807: PetscErrorCode  PCGASMGetDMSubdomains(PC pc,PetscBool* flg)
1808: {
1809:   PC_GASM        *osm = (PC_GASM*)pc->data;
1811:   PetscBool      match;

1816:   PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);
1817:   if (match) {
1818:     if (flg) *flg = osm->dm_subdomains;
1819:   }
1820:   return(0);
1821: }
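/*
   Illustrative sketch (not part of the original source, error checking omitted): querying
   whether DM-defined subdomains are enabled.
*/
static PetscErrorCode ExampleGASMQueryDMSubdomains(PC pc)
{
  PetscBool flg;

  PCGASMGetDMSubdomains(pc, &flg);
  PetscPrintf(PETSC_COMM_SELF, "DM-defined subdomains: %s\n", flg ? "yes" : "no");
  return(0);
}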