Actual source code: telescope.c

petsc-3.12.5 2020-03-29

  2:  #include <petsc/private/petscimpl.h>
  3:  #include <petsc/private/matimpl.h>
  4:  #include <petsc/private/pcimpl.h>
  5:  #include <petscksp.h>
  6:  #include <petscdm.h>
  7: #include "../src/ksp/pc/impls/telescope/telescope.h"

  9: static PetscBool  cited = PETSC_FALSE;
 10: static const char citation[] =
 11: "@inproceedings{MaySananRuppKnepleySmith2016,\n"
 12: "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
 13: "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
 14: "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
 15: "  series    = {PASC '16},\n"
 16: "  isbn      = {978-1-4503-4126-4},\n"
 17: "  location  = {Lausanne, Switzerland},\n"
 18: "  pages     = {5:1--5:12},\n"
 19: "  articleno = {5},\n"
 20: "  numpages  = {12},\n"
 21: "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
 22: "  doi       = {10.1145/2929908.2929913},\n"
 23: "  acmid     = {2929913},\n"
 24: "  publisher = {ACM},\n"
 25: "  address   = {New York, NY, USA},\n"
 26: "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
 27: "  year      = {2016}\n"
 28: "}\n";

 30: /*
 31:  default setup mode

 33:  [1a] scatter to (FORWARD)
 34:  x(comm) -> xtmp(comm)
 35:  [1b] local copy (to) ranks with color = 0
 36:  xred(subcomm) <- xtmp
 37:  
 38:  [2] solve on sub KSP to obtain yred(subcomm)

 40:  [3a] local copy (from) ranks with color = 0
 41:  yred(subcomm) -> xtmp
 42:  [3b] scatter from (REVERSE)
 43:  xtmp(comm) -> y(comm)
 44: */

 46: /*
  47:   Collective on comm_f
 48:   Notes
 49:    * Using comm_f = MPI_COMM_NULL will result in an error
 50:    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are MPI_COMM_NULL, the subcomm is not valid.
 51:    * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
 52: */
 53: PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
 54: {
 55:   PetscInt       valid = 1;
 56:   MPI_Group      group_f,group_c;
 58:   PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
 59:   PetscMPIInt    *ranks_f,*ranks_c;

 62:   if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

 64:   MPI_Comm_group(comm_f,&group_f);
 65:   if (comm_c != MPI_COMM_NULL) {
 66:     MPI_Comm_group(comm_c,&group_c);
 67:   }

 69:   MPI_Comm_size(comm_f,&size_f);
 70:   if (comm_c != MPI_COMM_NULL) {
 71:     MPI_Comm_size(comm_c,&size_c);
 72:   }

 74:   /* check not all comm_c's are NULL */
 75:   size_c_sum = size_c;
 76:   MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);
 77:   if (size_c_sum == 0) valid = 0;

 79:   /* check we can map at least 1 rank in comm_c to comm_f */
 80:   PetscMalloc1(size_f,&ranks_f);
 81:   PetscMalloc1(size_c,&ranks_c);
 82:   for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
 83:   for (k=0; k<size_c; k++) ranks_c[k] = k;

 85:   /*
 86:    MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
 87:    I do not want the code to terminate immediately if this occurs, rather I want to throw 
 88:    the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating 
 89:    that comm_c is not a valid sub-communicator.
 90:    Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
 91:   */
 92:   count = 0;
 93:   if (comm_c != MPI_COMM_NULL) {
 94:     (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
 95:     for (k=0; k<size_f; k++) {
 96:       if (ranks_f[k] == MPI_UNDEFINED) {
 97:         count++;
 98:       }
 99:     }
100:   }
101:   if (count == size_f) valid = 0;

103:   MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);
104:   if (valid == 1) *isvalid = PETSC_TRUE;
105:   else *isvalid = PETSC_FALSE;

107:   PetscFree(ranks_f);
108:   PetscFree(ranks_c);
109:   MPI_Group_free(&group_f);
110:   if (comm_c != MPI_COMM_NULL) {
111:     MPI_Group_free(&group_c);
112:   }
113:   return(0);
114: }

116: DM private_PCTelescopeGetSubDM(PC_Telescope sred)
117: {
118:   DM subdm = NULL;

120:   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
121:   else {
122:     switch (sred->sr_type) {
123:     case TELESCOPE_DEFAULT: subdm = NULL;
124:       break;
125:     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
126:       break;
127:     case TELESCOPE_DMPLEX:  subdm = NULL;
128:       break;
129:     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
130:       break;
131:     }
132:   }
133:   return(subdm);
134: }

136: PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
137: {
139:   PetscInt       m,M,bs,st,ed;
140:   Vec            x,xred,yred,xtmp;
141:   Mat            B;
142:   MPI_Comm       comm,subcomm;
143:   VecScatter     scatter;
144:   IS             isin;

147:   PetscInfo(pc,"PCTelescope: setup (default)\n");
148:   comm = PetscSubcommParent(sred->psubcomm);
149:   subcomm = PetscSubcommChild(sred->psubcomm);

151:   PCGetOperators(pc,NULL,&B);
152:   MatGetSize(B,&M,NULL);
153:   MatGetBlockSize(B,&bs);
154:   MatCreateVecs(B,&x,NULL);

156:   xred = NULL;
157:   m    = 0;
158:   if (PCTelescope_isActiveRank(sred)) {
159:     VecCreate(subcomm,&xred);
160:     VecSetSizes(xred,PETSC_DECIDE,M);
161:     VecSetBlockSize(xred,bs);
162:     VecSetFromOptions(xred);
163:     VecGetLocalSize(xred,&m);
164:   }

166:   yred = NULL;
167:   if (PCTelescope_isActiveRank(sred)) {
168:     VecDuplicate(xred,&yred);
169:   }

171:   VecCreate(comm,&xtmp);
172:   VecSetSizes(xtmp,m,PETSC_DECIDE);
173:   VecSetBlockSize(xtmp,bs);
174:   VecSetType(xtmp,((PetscObject)x)->type_name);

176:   if (PCTelescope_isActiveRank(sred)) {
177:     VecGetOwnershipRange(xred,&st,&ed);
178:     ISCreateStride(comm,(ed-st),st,1,&isin);
179:   } else {
180:     VecGetOwnershipRange(x,&st,&ed);
181:     ISCreateStride(comm,0,st,1,&isin);
182:   }
183:   ISSetBlockSize(isin,bs);

185:   VecScatterCreate(x,isin,xtmp,NULL,&scatter);

187:   sred->isin    = isin;
188:   sred->scatter = scatter;
189:   sred->xred    = xred;
190:   sred->yred    = yred;
191:   sred->xtmp    = xtmp;
192:   VecDestroy(&x);
193:   return(0);
194: }

196: PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
197: {
199:   MPI_Comm       comm,subcomm;
200:   Mat            Bred,B;
201:   PetscInt       nr,nc;
202:   IS             isrow,iscol;
203:   Mat            Blocal,*_Blocal;

206:   PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");
207:   PetscObjectGetComm((PetscObject)pc,&comm);
208:   subcomm = PetscSubcommChild(sred->psubcomm);
209:   PCGetOperators(pc,NULL,&B);
210:   MatGetSize(B,&nr,&nc);
211:   isrow = sred->isin;
212:   ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);
213:   ISSetIdentity(iscol);
214:   MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);
215:   MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
216:   Blocal = *_Blocal;
217:   PetscFree(_Blocal);
218:   Bred = NULL;
219:   if (PCTelescope_isActiveRank(sred)) {
220:     PetscInt mm;

222:     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

224:     MatGetSize(Blocal,&mm,NULL);
225:     MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
226:   }
227:   *A = Bred;
228:   ISDestroy(&iscol);
229:   MatDestroy(&Blocal);
230:   return(0);
231: }

233: static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
234: {
236:   PetscBool      has_const;
237:   const Vec      *vecs;
238:   Vec            *sub_vecs = NULL;
239:   PetscInt       i,k,n = 0;
240:   MPI_Comm       subcomm;

243:   subcomm = PetscSubcommChild(sred->psubcomm);
244:   MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

246:   if (PCTelescope_isActiveRank(sred)) {
247:     if (n) {
248:       VecDuplicateVecs(sred->xred,n,&sub_vecs);
249:     }
250:   }

252:   /* copy entries */
253:   for (k=0; k<n; k++) {
254:     const PetscScalar *x_array;
255:     PetscScalar       *LA_sub_vec;
256:     PetscInt          st,ed;

258:     /* pull in vector x->xtmp */
259:     VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
260:     VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
261:     if (sub_vecs) {
262:       /* copy vector entries into xred */
263:       VecGetArrayRead(sred->xtmp,&x_array);
264:       if (sub_vecs[k]) {
265:         VecGetOwnershipRange(sub_vecs[k],&st,&ed);
266:         VecGetArray(sub_vecs[k],&LA_sub_vec);
267:         for (i=0; i<ed-st; i++) {
268:           LA_sub_vec[i] = x_array[i];
269:         }
270:         VecRestoreArray(sub_vecs[k],&LA_sub_vec);
271:       }
272:       VecRestoreArrayRead(sred->xtmp,&x_array);
273:     }
274:   }

276:   if (PCTelescope_isActiveRank(sred)) {
277:     /* create new (near) nullspace for redundant object */
278:     MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);
279:     VecDestroyVecs(n,&sub_vecs);
280:     if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
281:     if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
282:   }
283:   return(0);
284: }

286: static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
287: {
289:   Mat            B;

292:   PCGetOperators(pc,NULL,&B);
293:   /* Propagate the nullspace if it exists */
294:   {
295:     MatNullSpace nullspace,sub_nullspace;
296:     MatGetNullSpace(B,&nullspace);
297:     if (nullspace) {
298:       PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");
299:       PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);
300:       if (PCTelescope_isActiveRank(sred)) {
301:         MatSetNullSpace(sub_mat,sub_nullspace);
302:         MatNullSpaceDestroy(&sub_nullspace);
303:       }
304:     }
305:   }
306:   /* Propagate the near nullspace if it exists */
307:   {
308:     MatNullSpace nearnullspace,sub_nearnullspace;
309:     MatGetNearNullSpace(B,&nearnullspace);
310:     if (nearnullspace) {
311:       PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");
312:       PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);
313:       if (PCTelescope_isActiveRank(sred)) {
314:         MatSetNearNullSpace(sub_mat,sub_nearnullspace);
315:         MatNullSpaceDestroy(&sub_nearnullspace);
316:       }
317:     }
318:   }
319:   return(0);
320: }

322: static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
323: {
324:   PC_Telescope   sred = (PC_Telescope)pc->data;
326:   PetscBool      iascii,isstring;
327:   PetscViewer    subviewer;

330:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
331:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
332:   if (iascii) {
333:     {
334:       MPI_Comm    comm,subcomm;
335:       PetscMPIInt comm_size,subcomm_size;
336:       DM          dm = NULL,subdm = NULL;

338:       PCGetDM(pc,&dm);
339:       subdm = private_PCTelescopeGetSubDM(sred);

341:       if (sred->psubcomm) {
342:         comm = PetscSubcommParent(sred->psubcomm);
343:         subcomm = PetscSubcommChild(sred->psubcomm);
344:         MPI_Comm_size(comm,&comm_size);
345:         MPI_Comm_size(subcomm,&subcomm_size);

347:         PetscViewerASCIIPushTab(viewer);
348:         PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);
 349:         PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d, subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);
 350:         switch (sred->subcommtype) {
 351:         case PETSC_SUBCOMM_INTERLACED :
 352:           PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n");
 353:           break;
 354:         case PETSC_SUBCOMM_CONTIGUOUS :
 355:           PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = contiguous\n");
 356:           break;
357:         default :
358:           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
359:         }
360:         PetscViewerASCIIPopTab(viewer);
361:       } else {
362:         PetscObjectGetComm((PetscObject)pc,&comm);
363:         subcomm = sred->subcomm;
364:         if (!PCTelescope_isActiveRank(sred)) {
365:           subcomm = PETSC_COMM_SELF;
366:         }

368:         PetscViewerASCIIPushTab(viewer);
369:         PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");
370:         PetscViewerASCIIPopTab(viewer);
371:       }

373:       PetscViewerGetSubViewer(viewer,subcomm,&subviewer);
374:       if (PCTelescope_isActiveRank(sred)) {
375:         PetscViewerASCIIPushTab(subviewer);

377:         if (dm && sred->ignore_dm) {
378:           PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");
379:         }
380:         if (sred->ignore_kspcomputeoperators) {
381:           PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");
382:         }
383:         switch (sred->sr_type) {
384:         case TELESCOPE_DEFAULT:
385:           PetscViewerASCIIPrintf(subviewer,"setup type: default\n");
386:           break;
387:         case TELESCOPE_DMDA:
388:           PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");
389:           DMView_DA_Short(subdm,subviewer);
390:           break;
391:         case TELESCOPE_DMPLEX:
392:           PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");
393:           break;
394:         case TELESCOPE_COARSEDM:
395:           PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");
396:           break;
397:         }

399:         if (dm) {
400:           PetscObject obj = (PetscObject)dm;
401:           PetscViewerASCIIPrintf(subviewer,"Parent DM object:");
402:           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
403:           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
404:           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
405:           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
406:           PetscViewerASCIIPrintf(subviewer,"\n");
407:           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
408:         } else {
409:           PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");
410:         }
411:         if (subdm) {
412:           PetscObject obj = (PetscObject)subdm;
413:           PetscViewerASCIIPrintf(subviewer,"Sub DM object:");
414:           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
415:           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
416:           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
417:           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
418:           PetscViewerASCIIPrintf(subviewer,"\n");
419:           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
420:         } else {
421:           PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");
422:         }
423: 
424:         KSPView(sred->ksp,subviewer);
425:         PetscViewerASCIIPopTab(subviewer);
426:       }
427:       PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);
428:     }
429:   }
430:   return(0);
431: }

433: static PetscErrorCode PCSetUp_Telescope(PC pc)
434: {
435:   PC_Telescope    sred = (PC_Telescope)pc->data;
436:   PetscErrorCode  ierr;
 437:   MPI_Comm        comm,subcomm=MPI_COMM_NULL;
438:   PCTelescopeType sr_type;

441:   PetscObjectGetComm((PetscObject)pc,&comm);

443:   /* Determine type of setup/update */
444:   if (!pc->setupcalled) {
445:     PetscBool has_dm,same;
446:     DM        dm;

448:     sr_type = TELESCOPE_DEFAULT;
449:     has_dm = PETSC_FALSE;
450:     PCGetDM(pc,&dm);
451:     if (dm) { has_dm = PETSC_TRUE; }
452:     if (has_dm) {
453:       /* check for dmda */
454:       PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);
455:       if (same) {
456:         PetscInfo(pc,"PCTelescope: found DMDA\n");
457:         sr_type = TELESCOPE_DMDA;
458:       }
459:       /* check for dmplex */
460:       PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);
461:       if (same) {
462:         PetscInfo(pc,"PCTelescope: found DMPLEX\n");
463:         sr_type = TELESCOPE_DMPLEX;
464:       }

466:       if (sred->use_coarse_dm) {
467:         PetscInfo(pc,"PCTelescope: using coarse DM\n");
468:         sr_type = TELESCOPE_COARSEDM;
469:       }

471:       if (sred->ignore_dm) {
472:         PetscInfo(pc,"PCTelescope: ignoring DM\n");
473:         sr_type = TELESCOPE_DEFAULT;
474:       }
475:     }
476:     sred->sr_type = sr_type;
477:   } else {
478:     sr_type = sred->sr_type;
479:   }

481:   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
482:   switch (sr_type) {
483:   case TELESCOPE_DEFAULT:
484:     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
485:     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
486:     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
487:     sred->pctelescope_reset_type              = NULL;
488:     break;
489:   case TELESCOPE_DMDA:
490:     pc->ops->apply                            = PCApply_Telescope_dmda;
491:     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
492:     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
493:     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
494:     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
495:     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
496:     break;
497:   case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
498:     break;
499:   case TELESCOPE_COARSEDM:
500:     pc->ops->apply                            = PCApply_Telescope_CoarseDM;
501:     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
502:     sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
503:     sred->pctelescope_matcreate_type          = NULL;
504:     sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
505:     sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
506:     break;
507:   default: SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
508:     break;
509:   }

511:   /* subcomm definition */
512:   if (!pc->setupcalled) {
513:     if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
514:       if (!sred->psubcomm) {
515:         PetscSubcommCreate(comm,&sred->psubcomm);
516:         PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);
517:         PetscSubcommSetType(sred->psubcomm,sred->subcommtype);
518:         PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));
519:         sred->subcomm = PetscSubcommChild(sred->psubcomm);
520:       }
521:     } else { /* query PC for DM, check communicators */
522:       DM          dm,dm_coarse_partition = NULL;
523:       MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
524:       PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
525:       PetscBool   isvalidsubcomm;

527:       PCGetDM(pc,&dm);
528:       comm_fine = PetscObjectComm((PetscObject)dm);
529:       DMGetCoarseDM(dm,&dm_coarse_partition);
530:       if (dm_coarse_partition) { cnt = 1; }
531:       MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);
532:       if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

534:       MPI_Comm_size(comm_fine,&csize_fine);
535:       if (dm_coarse_partition) {
536:         comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
537:         MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);
538:       }

540:       cs[0] = csize_fine;
541:       cs[1] = csize_coarse_partition;
542:       MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);
543:       if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

545:       PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);
546:       if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
547:       sred->subcomm = comm_coarse_partition;
548:     }
549:   }
550:   subcomm = sred->subcomm;

552:   /* internal KSP */
553:   if (!pc->setupcalled) {
554:     const char *prefix;

556:     if (PCTelescope_isActiveRank(sred)) {
557:       KSPCreate(subcomm,&sred->ksp);
558:       KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);
559:       PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);
560:       PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);
561:       KSPSetType(sred->ksp,KSPPREONLY);
562:       PCGetOptionsPrefix(pc,&prefix);
563:       KSPSetOptionsPrefix(sred->ksp,prefix);
564:       KSPAppendOptionsPrefix(sred->ksp,"telescope_");
565:     }
566:   }

568:   /* setup */
569:   if (!pc->setupcalled && sred->pctelescope_setup_type) {
570:     sred->pctelescope_setup_type(pc,sred);
571:   }
572:   /* update */
573:   if (!pc->setupcalled) {
574:     if (sred->pctelescope_matcreate_type) {
575:       sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);
576:     }
577:     if (sred->pctelescope_matnullspacecreate_type) {
578:       sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);
579:     }
580:   } else {
581:     if (sred->pctelescope_matcreate_type) {
582:       sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);
583:     }
584:   }

586:   /* common - no construction */
587:   if (PCTelescope_isActiveRank(sred)) {
588:     KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);
 589:     if (pc->setfromoptionscalled && !pc->setupcalled) {
590:       KSPSetFromOptions(sred->ksp);
591:     }
592:   }
593:   return(0);
594: }

596: static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
597: {
598:   PC_Telescope      sred = (PC_Telescope)pc->data;
599:   PetscErrorCode    ierr;
600:   Vec               xtmp,xred,yred;
601:   PetscInt          i,st,ed;
602:   VecScatter        scatter;
603:   PetscScalar       *array;
604:   const PetscScalar *x_array;

607:   PetscCitationsRegister(citation,&cited);

609:   xtmp    = sred->xtmp;
610:   scatter = sred->scatter;
611:   xred    = sred->xred;
612:   yred    = sred->yred;

614:   /* pull in vector x->xtmp */
615:   VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);
616:   VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);

618:   /* copy vector entries into xred */
619:   VecGetArrayRead(xtmp,&x_array);
620:   if (xred) {
621:     PetscScalar *LA_xred;
622:     VecGetOwnershipRange(xred,&st,&ed);
623:     VecGetArray(xred,&LA_xred);
624:     for (i=0; i<ed-st; i++) {
625:       LA_xred[i] = x_array[i];
626:     }
627:     VecRestoreArray(xred,&LA_xred);
628:   }
629:   VecRestoreArrayRead(xtmp,&x_array);
630:   /* solve */
631:   if (PCTelescope_isActiveRank(sred)) {
632:     KSPSolve(sred->ksp,xred,yred);
633:     KSPCheckSolve(sred->ksp,pc,yred);
634:   }
635:   /* return vector */
636:   VecGetArray(xtmp,&array);
637:   if (yred) {
638:     const PetscScalar *LA_yred;
639:     VecGetOwnershipRange(yred,&st,&ed);
640:     VecGetArrayRead(yred,&LA_yred);
641:     for (i=0; i<ed-st; i++) {
642:       array[i] = LA_yred[i];
643:     }
644:     VecRestoreArrayRead(yred,&LA_yred);
645:   }
646:   VecRestoreArray(xtmp,&array);
647:   VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);
648:   VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);
649:   return(0);
650: }

652: static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
653: {
654:   PC_Telescope      sred = (PC_Telescope)pc->data;
655:   PetscErrorCode    ierr;
656:   Vec               xtmp,yred;
657:   PetscInt          i,st,ed;
658:   VecScatter        scatter;
659:   const PetscScalar *x_array;
660:   PetscBool         default_init_guess_value;

663:   xtmp    = sred->xtmp;
664:   scatter = sred->scatter;
665:   yred    = sred->yred;

667:   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
668:   *reason = (PCRichardsonConvergedReason)0;

670:   if (!zeroguess) {
671:     PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");
672:     /* pull in vector y->xtmp */
673:     VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);
674:     VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);

676:     /* copy vector entries into xred */
677:     VecGetArrayRead(xtmp,&x_array);
678:     if (yred) {
679:       PetscScalar *LA_yred;
680:       VecGetOwnershipRange(yred,&st,&ed);
681:       VecGetArray(yred,&LA_yred);
682:       for (i=0; i<ed-st; i++) {
683:         LA_yred[i] = x_array[i];
684:       }
685:       VecRestoreArray(yred,&LA_yred);
686:     }
687:     VecRestoreArrayRead(xtmp,&x_array);
688:   }

690:   if (PCTelescope_isActiveRank(sred)) {
691:     KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
692:     if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
693:   }

695:   PCApply_Telescope(pc,x,y);

697:   if (PCTelescope_isActiveRank(sred)) {
698:     KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
699:   }

701:   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
702:   *outits = 1;
703:   return(0);
704: }

706: static PetscErrorCode PCReset_Telescope(PC pc)
707: {
708:   PC_Telescope   sred = (PC_Telescope)pc->data;

711:   ISDestroy(&sred->isin);
712:   VecScatterDestroy(&sred->scatter);
713:   VecDestroy(&sred->xred);
714:   VecDestroy(&sred->yred);
715:   VecDestroy(&sred->xtmp);
716:   MatDestroy(&sred->Bred);
717:   KSPReset(sred->ksp);
718:   if (sred->pctelescope_reset_type) {
719:     sred->pctelescope_reset_type(pc);
720:   }
721:   return(0);
722: }

724: static PetscErrorCode PCDestroy_Telescope(PC pc)
725: {
726:   PC_Telescope   sred = (PC_Telescope)pc->data;

730:   PCReset_Telescope(pc);
731:   KSPDestroy(&sred->ksp);
732:   PetscSubcommDestroy(&sred->psubcomm);
733:   PetscFree(sred->dm_ctx);
734:   PetscFree(pc->data);
735:   return(0);
736: }

738: static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
739: {
740:   PC_Telescope     sred = (PC_Telescope)pc->data;
741:   PetscErrorCode   ierr;
742:   MPI_Comm         comm;
743:   PetscMPIInt      size;
744:   PetscBool        flg;
745:   PetscSubcommType subcommtype;

748:   PetscObjectGetComm((PetscObject)pc,&comm);
749:   MPI_Comm_size(comm,&size);
750:   PetscOptionsHead(PetscOptionsObject,"Telescope options");
751:   PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);
752:   if (flg) {
753:     PCTelescopeSetSubcommType(pc,subcommtype);
754:   }
755:   PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);
756:   if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
757:   PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);
758:   PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);
759:   PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,0);
760:   PetscOptionsTail();
761:   return(0);
762: }

 764: /* PC implementation-specific APIs */

766: static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
767: {
768:   PC_Telescope red = (PC_Telescope)pc->data;
770:   if (ksp) *ksp = red->ksp;
771:   return(0);
772: }

774: static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
775: {
776:   PC_Telescope red = (PC_Telescope)pc->data;
778:   if (subcommtype) *subcommtype = red->subcommtype;
779:   return(0);
780: }

782: static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
783: {
784:   PC_Telescope     red = (PC_Telescope)pc->data;

787:   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
788:   red->subcommtype = subcommtype;
789:   return(0);
790: }

792: static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
793: {
794:   PC_Telescope red = (PC_Telescope)pc->data;
796:   if (fact) *fact = red->redfactor;
797:   return(0);
798: }

800: static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
801: {
802:   PC_Telescope     red = (PC_Telescope)pc->data;
803:   PetscMPIInt      size;
804:   PetscErrorCode   ierr;

807:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
808:   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
809:   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
810:   red->redfactor = fact;
811:   return(0);
812: }

814: static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
815: {
816:   PC_Telescope red = (PC_Telescope)pc->data;
818:   if (v) *v = red->ignore_dm;
819:   return(0);
820: }

822: static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
823: {
824:   PC_Telescope red = (PC_Telescope)pc->data;
826:   red->ignore_dm = v;
827:   return(0);
828: }

830: static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
831: {
832:   PC_Telescope red = (PC_Telescope)pc->data;
834:   if (v) *v = red->use_coarse_dm;
835:   return(0);
836: }

838: static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
839: {
840:   PC_Telescope red = (PC_Telescope)pc->data;
842:   red->use_coarse_dm = v;
843:   return(0);
844: }

846: static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
847: {
848:   PC_Telescope red = (PC_Telescope)pc->data;
850:   if (v) *v = red->ignore_kspcomputeoperators;
851:   return(0);
852: }

854: static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
855: {
856:   PC_Telescope red = (PC_Telescope)pc->data;
858:   red->ignore_kspcomputeoperators = v;
859:   return(0);
860: }

862: static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
863: {
864:   PC_Telescope red = (PC_Telescope)pc->data;
866:   *dm = private_PCTelescopeGetSubDM(red);
867:   return(0);
868: }

870: /*@
871:  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

873:  Not Collective

875:  Input Parameter:
876: .  pc - the preconditioner context

878:  Output Parameter:
 879: .  subksp - the KSP defined on the smaller set of processes
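
 Notes:
 On ranks which are not members of the sub-communicator (and before PCSetUp() has been called),
 subksp is returned as NULL. A minimal usage sketch; the choice of GMRES for the sub-solver is
 purely illustrative:
.vb
   KSP subksp;
   PCTelescopeGetKSP(pc,&subksp);
   if (subksp) { KSPSetType(subksp,KSPGMRES); }
.ve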

881:  Level: advanced

883: @*/
884: PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
885: {
888:   PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));
889:   return(0);
890: }

892: /*@
 893:  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

895:  Not Collective

897:  Input Parameter:
898: .  pc - the preconditioner context

900:  Output Parameter:
901: .  fact - the reduction factor

903:  Level: advanced

905: @*/
906: PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
907: {
910:   PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));
911:   return(0);
912: }

914: /*@
 915:  PCTelescopeSetReductionFactor - Sets the factor by which the number of processes will be reduced.

917:  Not Collective

 919:  Input Parameters:
 920: +  pc - the preconditioner context
 921: -  fact - the reduction factor
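
 Notes:
 A minimal usage sketch; the factor of 2 is illustrative, and the value must be positive and no
 larger than the size of the communicator associated with pc:
.vb
   PCSetType(pc,PCTELESCOPE);
   PCTelescopeSetReductionFactor(pc,2);
.ve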

925:  Level: advanced

927: @*/
928: PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
929: {
932:   PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));
933:   return(0);
934: }

936: /*@
 937:  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be ignored.

939:  Not Collective

941:  Input Parameter:
942: .  pc - the preconditioner context

944:  Output Parameter:
945: .  v - the flag

947:  Level: advanced

949: @*/
950: PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
951: {
954:   PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));
955:   return(0);
956: }

958: /*@
959:  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

961:  Not Collective

 963:  Input Parameters:
 964: +  pc - the preconditioner context
 965: -  v - Use PETSC_TRUE to ignore any DM attached to the PC

969:  Level: advanced

971: @*/
972: PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
973: {
976:   PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));
977:   return(0);
978: }

980: /*@
 981:  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to the DM associated with the PC will be used.

983:  Not Collective

985:  Input Parameter:
986: .  pc - the preconditioner context

988:  Output Parameter:
989: .  v - the flag

991:  Level: advanced

993: @*/
994: PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
995: {
998:   PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));
999:   return(0);
1000: }

1002: /*@
1003:  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM.

1005:  Not Collective

1007:  Input Parameters:
1008: +  pc - the preconditioner context
1009: -  v - Use PETSC_FALSE to ignore any coarse DM

1013:  Notes:
1014:  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1015:  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1016:  -pc_telescope_subcomm_type will no longer have any meaning.
1017:  It is required that the communicators associated with the parent (fine) DM and the coarse DM are of different sizes.
1018:  An error will occur if the size of the communicator associated with the coarse DM
1019:  is the same as that of the parent DM.
1020:  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1021:  This will be checked at the time the preconditioner is set up, and an error will occur if
1022:  the coarse DM does not define a sub-communicator of that used by the parent DM.

1024:  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1025:  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).

1027:  Support is currently only provided for the case when you are using KSPSetComputeOperators().

1029:  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1030:  In the user code, this is achieved via
1031: .vb
1032:    {
1033:      DM dm_fine;
1034:      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1035:    }
1036: .ve
1037:  The signature of the user provided field scatter method is
1038: .vb
1039:    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1040: .ve
1041:  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1042:  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
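
 A minimal sketch of such a method is given below. The body is entirely the responsibility of the
 user; only the signature and the two scatter modes are mandated by PCTelescope.
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse)
   {
     if (mode == SCATTER_FORWARD) {
       /* gather values of x_fine (fine decomposition) into x_coarse (coarse decomposition) */
     } else if (mode == SCATTER_REVERSE) {
       /* push values of x_coarse back into x_fine */
     }
     return(0);
   }
.ve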

1044:  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1045:  of state variables between the fine and coarse DMs.
1046:  In the context of a finite element discretization, an example state variable might be
1047:  values associated with quadrature points within each element.
1048:  A user provided state scatter method is composed via
1049: .vb
1050:    {
1051:      DM dm_fine;
1052:      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1053:    }
1054: .ve
1055:  The signature of the user provided state scatter method is
1056: .vb
1057:    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1058: .ve
1059:  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1060:  The user is only required to support mode = SCATTER_FORWARD.
1061:  No assumption is made about the data type of the state variables.
1062:  These must be managed by the user and must be accessible from the DM.

1064:  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1065:  associated with the sub-KSP residing within PCTelescope.
1066:  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1067:  KSPSetComputeOperators() should be "similar" in type or origin.
1068:  Specifically, the following rules are used to infer what the context on the sub-KSP should be.

1070:  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1071:  Note that the special case of a DMSHELL context is also queried.

1073: .vb
1074:    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1075:    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1076:    DMShellGetContext(dm_fine,&dmfine_shellctx);

1078:    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1079:    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1080: .ve

1082:  The following rules are then enforced:

1084:  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1085:  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);

1087:  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1088:  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1089:  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

1091:  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1092:  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1093:  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

1095:  If none of the above three tests pass, then PCTelescope cannot safely determine what
1096:  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1097:  In this case, an additional mechanism is provided via a composed function which will return
1098:  the actual context to be used. To use this feature you must compose the "getter" function
1099:  with the coarse DM, e.g.
1100: .vb
1101:    {
1102:      DM dm_coarse;
1103:      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1104:    }
1105: .ve
1106:  The signature of the user provided method is
1107: .vb
1108:    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1109: .ve

1111:  Level: advanced

1113: @*/
1114: PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
1115: {
1118:   PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));
1119:   return(0);
1120: }

1122: /*@
1123:  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if the method set via KSPSetComputeOperators() will be ignored.

1125:  Not Collective

1127:  Input Parameter:
1128: .  pc - the preconditioner context

1130:  Output Parameter:
1131: .  v - the flag

1133:  Level: advanced

1135: @*/
1136: PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
1137: {
1140:   PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));
1141:   return(0);
1142: }

1144: /*@
1145:  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

1147:  Not Collective

1149:  Input Parameters:
1150: +  pc - the preconditioner context
1151: -  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators() on pc

1155:  Level: advanced

1157: @*/
1158: PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
1159: {
1162:   PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));
1163:   return(0);
1164: }

1166: /*@
1167:  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

1169:  Not Collective

1171:  Input Parameter:
1172: .  pc - the preconditioner context

1174:  Output Parameter:
1175: .  subdm - The re-partitioned DM
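
 Notes:
 A minimal usage sketch; subdm is NULL on ranks outside the sub-communicator, and also when the
 active setup type does not construct a re-partitioned DM:
.vb
   DM subdm;
   PCTelescopeGetDM(pc,&subdm);
   if (subdm) { DMView(subdm,PETSC_VIEWER_STDOUT_SELF); }
.ve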

1177:  Level: advanced

1179: @*/
1180: PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
1181: {
1184:   PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));
1185:   return(0);
1186: }

1188: /*@
1189:  PCTelescopeSetSubcommType - Set the subcommunicator type (interlaced or contiguous).

1191:  Logically Collective

1193:  Input Parameters:
1194: +  pc - the preconditioner context
1195: -  subcommtype - the subcommunicator type (see PetscSubcommType)
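
 Notes:
 This must be called before the preconditioner has been set up. A minimal usage sketch; the
 contiguous layout shown is just one of the supported types:
.vb
   PCSetType(pc,PCTELESCOPE);
   PCTelescopeSetSubcommType(pc,PETSC_SUBCOMM_CONTIGUOUS);
.ve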

1197:  Level: advanced

1199: .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
1200: @*/
1201: PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
1202: {
1205:   PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));
1206:   return(0);
1207: }

1209: /*@
1210:  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

1212:  Not Collective

1214:  Input Parameter:
1215: .  pc - the preconditioner context

1217:  Output Parameter:
1218: .  subcommtype - the subcommunicator type (see PetscSubcommType)

1220:  Level: advanced

1222: .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
1223: @*/
1224: PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
1225: {
1228:   PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));
1229:   return(0);
1230: }

1232: /* -------------------------------------------------------------------------------------*/
1233: /*MC
1234:    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.

1236:    Options Database:
1237: +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by, e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1238: .  -pc_telescope_ignore_dm - flag to indicate whether an attached DM should be ignored.
1239: .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. See PetscSubcomm for more information.
1240: .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1241: -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.

1243:    Level: advanced

1245:    Notes:
1246:    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1247:    creates a child sub-communicator (c') containing fewer MPI ranks than c.
1248:    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1249:    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1250:    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1251:    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.

1253:    The default type of the sub KSP (the KSP defined on c') is PREONLY.
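
   As an illustration (the choice of solvers here is arbitrary), the following options run CG on the
   full communicator and, within the preconditioner, a GMRES/block Jacobi solve on a communicator
   four times smaller; the sub KSP picks up options prefixed with "telescope_":
.vb
   -ksp_type cg -pc_type telescope -pc_telescope_reduction_factor 4
   -telescope_ksp_type gmres -telescope_pc_type bjacobi
.ve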

1255:    There are three setup mechanisms for PCTelescope. Features supported by each type are described below.
1256:    In the following, we will refer to the operators B and B'; these are the Bmat provided to the KSP on the
1257:    communicators c and c' respectively.

1259:    [1] Default setup
1260:    The sub-communicator c' is created via PetscSubcommCreate().
1261:    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
1262:    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1263:    No support is provided for KSPSetComputeOperators().
1264:    Currently there is no support for the flag -pc_use_amat.

1266:    [2] DM aware setup
1267:    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1268:    c' is created via PetscSubcommCreate().
1269:    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1270:    Currently only support for re-partitioning a DMDA is provided.
1271:    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
1272:    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1273:    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1274:    This is fragile since the user must ensure that their user context is valid for use on c'.
1275:    Currently there is no support for the flag -pc_use_amat.

1277:    [3] Coarse DM setup
1278:    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1279:    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1280:    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1281:    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1282:    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1283:    available with using multi-grid on unstructured meshes.
1284:    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1285:    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
1286:    Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1287:    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1288:    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1289:    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1290:    Currently there is no support for the flag -pc_use_amat.
1291:    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1292:    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().

1294:    Developer Notes:
1295:    During PCSetup, the B operator is scattered onto c'.
1296:    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1297:    Then, KSPSolve() is executed on the c' communicator.

1299:    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED 
1300:    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.

1302:    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1303:    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1304:    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c').

1306:    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1307:    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1308:    and this new DM is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
1309:    for re-partitioning other DMs (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1310:    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.

1312:    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.

1314:    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1315:    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1316:    local (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat().

1318:    Limitations/improvements include the following:
1319:    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage;
1320:    a unified mechanism to query for user contexts, as required by KSPSetComputeOperators() and MatNullSpaceSetFunction(), is currently missing.

1322:    The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1323:    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
1324:    VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
1325:    consistent manner.

1327:    Mapping of vectors (default setup mode) is performed in the following way.
1328:    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1329:    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1330:    We perform the scatter to the sub-communicator in the following way.
1331:    [1] Given a vector x defined on communicator c

1333: .vb
1334:    rank(c)  local values of x
1335:    ------- ----------------------------------------
1336:         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1337:         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1338:         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1339:         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1340: .ve

1342:    scatter into xtmp defined also on comm c, so that we have the following values

1344: .vb
1345:    rank(c)  local values of xtmp
1346:    ------- ----------------------------------------------------------------------------
1347:         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1348:         1   [ ]
1349:         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1350:         3   [ ]
1351: .ve

1353:    The entries on ranks 1 and 3 (ranks which do not have a color = 0 in c') have no values.


1356:    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1357:    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.

1359: .vb
1360:    rank(c')  local values of xred
1361:    -------- ----------------------------------------------------------------------------
1362:          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1363:          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1364: .ve

1366:   Contributed by Dave May

1368:   Reference:
1369:   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913

1371: .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1372: M*/
1373: PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1374: {
1375:   PetscErrorCode       ierr;
1376:   struct _PC_Telescope *sred;

1379:   PetscNewLog(pc,&sred);
1380:   sred->psubcomm       = NULL;
1381:   sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
1382:   sred->subcomm        = MPI_COMM_NULL;
1383:   sred->redfactor      = 1;
1384:   sred->ignore_dm      = PETSC_FALSE;
1385:   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1386:   sred->use_coarse_dm  = PETSC_FALSE;
1387:   pc->data             = (void*)sred;

1389:   pc->ops->apply           = PCApply_Telescope;
1390:   pc->ops->applytranspose  = NULL;
1391:   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1392:   pc->ops->setup           = PCSetUp_Telescope;
1393:   pc->ops->destroy         = PCDestroy_Telescope;
1394:   pc->ops->reset           = PCReset_Telescope;
1395:   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1396:   pc->ops->view            = PCView_Telescope;

1398:   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1399:   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1400:   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1401:   sred->pctelescope_reset_type              = NULL;

1403:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);
1404:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);
1405:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);
1406:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);
1407:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);
1408:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);
1409:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);
1410:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);
1411:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);
1412:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);
1413:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);
1414:   PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);
1415:   return(0);
1416: }