Actual source code: telescope.c
petsc-3.7.7 2017-09-25
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/
#include <petscdm.h> /*I "petscdm.h" I*/
#include <../src/ksp/pc/impls/telescope/telescope.h>
/*
  PCTelescopeSetUp_default()
  PCTelescopeMatCreate_default()

  default

  // scatter in
  x(comm) -> xtmp(comm)
  xred(subcomm) <- xtmp
  yred(subcomm)
  yred(subcomm) --> xtmp
  // scatter out
  xtmp(comm) -> y(comm)
*/
PetscBool isActiveRank(PetscSubcomm scomm)
{
  if (scomm->color == 0) { return PETSC_TRUE; }
  else { return PETSC_FALSE; }
}
DM private_PCTelescopeGetSubDM(PC_Telescope sred)
{
  DM subdm = NULL;

  if (!isActiveRank(sred->psubcomm)) { subdm = NULL; }
  else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT: subdm = NULL;
      break;
    case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
      break;
    case TELESCOPE_DMPLEX:  subdm = NULL;
      break;
    }
  }
  return(subdm);
}
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscInt   m,M,bs,st,ed;
  Vec        x,xred,yred,xtmp;
  Mat        B;
  MPI_Comm   comm,subcomm;
  VecScatter scatter;
  IS         isin;

  PetscInfo(pc,"PCTelescope: setup (default)\n");
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PCGetOperators(pc,NULL,&B);
  MatGetSize(B,&M,NULL);
  MatGetBlockSize(B,&bs);
  MatCreateVecs(B,&x,NULL);

  xred = NULL;
  m    = 0;
  if (isActiveRank(sred->psubcomm)) {
    VecCreate(subcomm,&xred);
    VecSetSizes(xred,PETSC_DECIDE,M);
    VecSetBlockSize(xred,bs);
    VecSetFromOptions(xred);
    VecGetLocalSize(xred,&m);
  }

  yred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    VecDuplicate(xred,&yred);
  }
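
  /* xtmp lives on the parent comm but is sized using the local size m computed
     above, so only active (color == 0) ranks own entries; on inactive ranks m
     is still 0 and the local portion is empty. */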
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,((PetscObject)x)->type_name);

  if (isActiveRank(sred->psubcomm)) {
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,(ed-st),st,1,&isin);
  } else {
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
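
  /* All ranks of the parent comm participate in the scatter x -> xtmp: each
     active rank pulls the contiguous block matching the ownership range of its
     xred, while inactive ranks pull nothing (their IS has zero length). */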
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  VecDestroy(&x);
  return(0);
}
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  MPI_Comm comm,subcomm;
  Mat      Bred,B;
  PetscInt nr,nc;
  IS       isrow,iscol;
  Mat      Blocal,*_Blocal;

  PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);
  subcomm = PetscSubcommChild(sred->psubcomm);
  PCGetOperators(pc,NULL,&B);
  MatGetSize(B,&nr,&nc);
  isrow = sred->isin;
  ISCreateStride(comm,nc,0,1,&iscol);
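  /* Extract, on every rank, a sequential matrix containing the locally
     required rows of B (defined by isin) and all nc columns. */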
  MatGetSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
  Blocal = *_Blocal;
  PetscFree(_Blocal);
  Bred   = NULL;
  if (isActiveRank(sred->psubcomm)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
    MatGetSize(Blocal,&mm,NULL);
    MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
  }
  *A = Bred;
  ISDestroy(&iscol);
  MatDestroy(&Blocal);
  return(0);
}
PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
{
  PetscErrorCode ierr;
  MatNullSpace   nullspace,sub_nullspace;
  Mat            A,B;
  PetscBool      has_const;
  PetscInt       i,k,n = 0;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  MPI_Comm       subcomm;

  PCGetOperators(pc,&A,&B);
  MatGetNullSpace(B,&nullspace);
  if (!nullspace) return(0);

  PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");
  subcomm = PetscSubcommChild(sred->psubcomm);
  MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

  if (isActiveRank(sred->psubcomm)) {
    if (n) {
      VecDuplicateVecs(sred->xred,n,&sub_vecs);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
    if (sub_vecs) {
      /* copy vector entries into xred */
      VecGetArrayRead(sred->xtmp,&x_array);
      if (sub_vecs[k]) {
        VecGetOwnershipRange(sub_vecs[k],&st,&ed);
        VecGetArray(sub_vecs[k],&LA_sub_vec);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        VecRestoreArray(sub_vecs[k],&LA_sub_vec);
      }
      VecRestoreArrayRead(sred->xtmp,&x_array);
    }
  }

  if (isActiveRank(sred->psubcomm)) {
    /* create new nullspace for redundant object */
    MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,&sub_nullspace);
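    /* propagate any user-supplied removal callback and its context to the
       redundant nullspace (a shallow copy of the MatNullSpace internals) */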
    sub_nullspace->remove = nullspace->remove;
    sub_nullspace->rmctx  = nullspace->rmctx;

    /* attach redundant nullspace to Bred */
    MatSetNullSpace(sub_mat,sub_nullspace);
    VecDestroyVecs(n,&sub_vecs);
  }
  return(0);
}
static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!sred->psubcomm) {
      PetscViewerASCIIPrintf(viewer," Telescope: preconditioner not yet setup\n");
    } else {
      MPI_Comm    comm,subcomm;
      PetscMPIInt comm_size,subcomm_size;
      DM          dm,subdm;

      PCGetDM(pc,&dm);
      subdm   = private_PCTelescopeGetSubDM(sred);
      comm    = PetscSubcommParent(sred->psubcomm);
      subcomm = PetscSubcommChild(sred->psubcomm);
      MPI_Comm_size(comm,&comm_size);
      MPI_Comm_size(subcomm,&subcomm_size);

      PetscViewerASCIIPrintf(viewer," Telescope: parent comm size reduction factor = %D\n",sred->redfactor);
      PetscViewerASCIIPrintf(viewer," Telescope: comm_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);
      PetscViewerGetSubViewer(viewer,subcomm,&subviewer);
      if (isActiveRank(sred->psubcomm)) {
        PetscViewerASCIIPushTab(subviewer);

        if (dm && sred->ignore_dm) {
          PetscViewerASCIIPrintf(subviewer," Telescope: ignoring DM\n");
        }
        if (sred->ignore_kspcomputeoperators) {
          PetscViewerASCIIPrintf(subviewer," Telescope: ignoring KSPComputeOperators\n");
        }
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscViewerASCIIPrintf(subviewer," Telescope: using default setup\n");
          break;
        case TELESCOPE_DMDA:
          PetscViewerASCIIPrintf(subviewer," Telescope: DMDA detected\n");
          DMView_DMDAShort(subdm,subviewer);
          break;
        case TELESCOPE_DMPLEX:
          PetscViewerASCIIPrintf(subviewer," Telescope: DMPLEX detected\n");
          break;
        }

        KSPView(sred->ksp,subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);
    }
  }
  return(0);
}
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* subcomm definition */
  if (!pc->setupcalled) {
    if (!sred->psubcomm) {
      PetscSubcommCreate(comm,&sred->psubcomm);
      PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);
      PetscSubcommSetType(sred->psubcomm,PETSC_SUBCOMM_INTERLACED);
      /* disable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
      /* PetscSubcommSetFromOptions(sred->psubcomm); */
      PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));
      /* create a new PC that processors in each subcomm have a copy of */
    }
  }
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (isActiveRank(sred->psubcomm)) {
      KSPCreate(subcomm,&sred->ksp);
      KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);
      PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);
      PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);
      KSPSetType(sred->ksp,KSPPREONLY);
      PCGetOptionsPrefix(pc,&prefix);
      KSPSetOptionsPrefix(sred->ksp,prefix);
      KSPAppendOptionsPrefix(sred->ksp,"telescope_");
    }
  }
  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PCGetDM(pc,&dm);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);
      if (same) {
        PetscInfo(pc,"PCTelescope: found DMDA\n");
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);
      if (same) {
        PetscInfo(pc,"PCTelescope: found DMPLEX\n");
        sr_type = TELESCOPE_DMPLEX;
      }
    }

    if (sred->ignore_dm) {
      PetscInfo(pc,"PCTelescope: ignore DM\n");
      sr_type = TELESCOPE_DEFAULT;
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }
  /* set function pointers for repartition setup, matrix creation/update, matrix nullspace and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
    break;
  default: SETERRQ(comm,PETSC_ERR_SUP,"Only support for repartitioning DMDA is provided");
    break;
  }

  /* setup */
  if (sred->pctelescope_setup_type) {
    sred->pctelescope_setup_type(pc,sred);
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);
    }
  }

  /* common - no construction */
  if (isActiveRank(sred->psubcomm)) {
    KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      KSPSetFromOptions(sred->ksp);
    }
  }
  return(0);
}
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  /* copy vector entries into xred */
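  /* note: by construction xtmp and xred have the same local length on active
     ranks, so entries are copied directly between the two local arrays */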
  VecGetArrayRead(xtmp,&x_array);
  if (xred) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xred,&st,&ed);
    VecGetArray(xred,&LA_xred);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    VecRestoreArray(xred,&LA_xred);
  }
  VecRestoreArrayRead(xtmp,&x_array);
  /* solve */
  if (isActiveRank(sred->psubcomm)) {
    KSPSolve(sred->ksp,xred,yred);
  }
  /* return vector */
  VecGetArray(xtmp,&array);
  if (yred) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yred,&st,&ed);
    VecGetArrayRead(yred,&LA_yred);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    VecRestoreArrayRead(yred,&LA_yred);
  }
  VecRestoreArray(xtmp,&array);
  VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);
  return(0);
}
static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol,PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value;

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");
    /* pull in vector y->xtmp */
    VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);
    /* copy vector entries into yred */
    VecGetArrayRead(xtmp,&x_array);
    if (yred) {
      PetscScalar *LA_yred;
      VecGetOwnershipRange(yred,&st,&ed);
      VecGetArray(yred,&LA_yred);
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      VecRestoreArray(yred,&LA_yred);
    }
    VecRestoreArrayRead(xtmp,&x_array);
  }

  if (isActiveRank(sred->psubcomm)) {
    KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
    if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
  }

  PCApply_Telescope(pc,x,y);

  if (isActiveRank(sred->psubcomm)) {
    KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  return(0);
}
static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  ISDestroy(&sred->isin);
  VecScatterDestroy(&sred->scatter);
  VecDestroy(&sred->xred);
  VecDestroy(&sred->yred);
  VecDestroy(&sred->xtmp);
  MatDestroy(&sred->Bred);
  KSPReset(sred->ksp);
  if (sred->pctelescope_reset_type) {
    sred->pctelescope_reset_type(pc);
  }
  return(0);
}
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PCReset_Telescope(pc);
  KSPDestroy(&sred->ksp);
  PetscSubcommDestroy(&sred->psubcomm);
  PetscFree(sred->dm_ctx);
  PetscFree(pc->data);
  return(0);
}
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscMPIInt    size;

  PetscObjectGetComm((PetscObject)pc,&comm);
  MPI_Comm_size(comm,&size);
  PetscOptionsHead(PetscOptionsObject,"Telescope options");
  PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);
  PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);
  PetscOptionsTail();
  return(0);
}
/* PC implementation specific APIs */
static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (ksp) *ksp = red->ksp;
  return(0);
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (fact) *fact = red->redfactor;
  return(0);
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
{
  PC_Telescope   red = (PC_Telescope)pc->data;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
  if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
  if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
  red->redfactor = fact;
  return(0);
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (v) *v = red->ignore_dm;
  return(0);
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  red->ignore_dm = v;
  return(0);
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (v) *v = red->ignore_kspcomputeoperators;
  return(0);
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  red->ignore_kspcomputeoperators = v;
  return(0);
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  *dm = private_PCTelescopeGetSubDM(red);
  return(0);
}
/*@
   PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));
  return(0);
}
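
/* Example usage (a sketch): after PCSetUp(), the sub-solver can be customized
   programmatically. Note the returned KSP is NULL on ranks outside the active
   sub-communicator, hence the guard.

     KSP subksp;
     PCTelescopeGetKSP(pc,&subksp);
     if (subksp) { KSPSetType(subksp,KSPGMRES); }
*/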
/*@
   PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  fact - the reduction factor

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));
  return(0);
}
/*@
   PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));
  return(0);
}
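
/* Example usage (a sketch): configure the reduction factor programmatically
   before the first PCSetUp(), instead of via the options database.

     PCSetType(pc,PCTELESCOPE);
     PCTelescopeSetReductionFactor(pc,4);
*/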
/*@
   PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  v - the flag

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));
  return(0);
}
/*@
   PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
-  v - use PETSC_TRUE to ignore any DM

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));
  return(0);
}
/*@
   PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  v - the flag

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));
  return(0);
}
/*@
   PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
-  v - use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));
  return(0);
}
/*@
   PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  subdm - the re-partitioned DM

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));
  return(0);
}
/* -------------------------------------------------------------------------------------*/
/*MC
   PCTELESCOPE - Runs a KSP solver on a sub-group of processes. MPI processes not in the sub-communicator are idle during the solve.

   Options Database:
+  -pc_telescope_reduction_factor <n> - factor to reduce the communicator size by; for example, if you are using 64 MPI processes and
                                        n is 4, the new sub-communicator will be defined with 64/4 = 16 processes
-  -pc_telescope_ignore_dm <false> - flag to indicate whether an attached DM should be ignored

   Level: advanced

   Notes:
   The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
   sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
   This means there will be MPI processes within c which are idle during the application of this preconditioner.

   The default KSP is PREONLY. If a DM is attached to the PC, it is re-partitioned on the sub-communicator.
   Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
   Currently only support for re-partitioning a DMDA is provided.
   Any nullspace attached to the original Bmat operator is extracted, re-partitioned and set on the re-partitioned Bmat operator.
   KSPSetComputeOperators() is not propagated to the sub KSP.
   Currently there is no support for the flag -pc_use_amat.
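
   Example usage (a sketch; the application name and the outer solver options are illustrative):
$     mpiexec -n 16 ./app -ksp_type gmres -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_ksp_type preonly -telescope_pc_type lu
   The sub KSP inherits the options prefix "telescope_", so its solver and preconditioner are
   controlled via -telescope_ksp_type and -telescope_pc_type.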
   Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner (PC).

   Developer Notes:
   During PCSetUp(), the B operator is scattered onto c'.
   Within PCApply(), the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then KSPSolve() is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
   creation routine. We run the sub KSP on only the ranks within the communicator which have a color equal to zero.

   The telescoping preconditioner is aware of nullspaces attached to the B operator.
   In the case where B has a nullspace attached, the nullspace vectors are extracted from B and mapped into
   a new nullspace (defined on the sub-communicator) which is attached to B' (the B operator which was scattered to c').

   The telescoping preconditioner is aware of an attached DM. In the event that the DM is of type DMDA (2D or 3D;
   support for 1D DMDAs is not provided), a new DMDA is created on c' (i.e. it is re-partitioned), and this new DM
   is attached to the sub KSPSolve(). The design of telescope is such that it should be possible to extend support
   for re-partitioning other DMs (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.

   By default, B' is defined by simply fusing rows from different MPI processes.

   When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatGetSubMatrices(), (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat().

   Limitations/improvements:
   VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.

   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). I opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, I can permute both the operator and the RHS in a
   consistent manner.
   Mapping of vectors is performed in the following way.
   Suppose the parent comm size was 4, and we set a reduction factor of 2; this gives a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be ranks 0 and 2.
   We perform the scatter to the sub-comm in the following way.
   [1] Given a vector x defined on comm c,

   rank(c)  : _________ 0 ______  ________ 1 _______  ________ 2 _____________  ___________ 3 __________
   x        : [0, 1, 2, 3, 4, 5]  [6, 7, 8, 9, 10, 11]  [12, 13, 14, 15, 16, 17]  [18, 19, 20, 21, 22, 23]

   scatter into xtmp, defined also on comm c, so that we have the following values:

   rank(c)  : ___________________ 0 ________________  _1_  ______________________ 2 _______________________  __3_
   xtmp     : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]  [ ]  [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]  [ ]

   The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.

   [2] Copy the values from ranks 0 and 2 (indices with respect to comm c) into the vector xred, which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in the subcomm which have color = 0.

   rank(c') : ___________________ 0 _______________  ______________________ 1 _____________________
   xred     : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]  [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]

   Contributed by Dave May

.seealso: PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscNewLog(pc,&sred);
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  pc->data                         = (void*)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);
  return(0);
}