Actual source code: redundant.c
petsc-3.11.4 2019-09-28
/*
   This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>
typedef struct {
  KSP                ksp;
  PC                 pc;                    /* actual preconditioner used on each process */
  Vec                xsub,ysub;             /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;             /* parallel vectors that concatenate xsub or ysub, facilitating vector scattering */
  Mat                pmats;                 /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout;  /* scatters used to move all values to each process group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;              /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (red->ksp) {
    PC pc;
    KSPGetPC(red->ksp,&pc);
    PCFactorSetShiftType(pc,shifttype);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  return(0);
}
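/*
   A minimal usage sketch (not part of this file), illustrating why the branch above caches the
   shift type: if PCFactorSetShiftType() is called on a PCREDUNDANT before its inner KSP exists,
   the value is stored and applied once the inner factorization PC is created during setup.
   The KSP "ksp" is assumed to have been created by the application with operators already set.

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCFactorSetShiftType(pc,MAT_SHIFT_NONZERO);   <-- recorded in red->shifttype for now
     KSPSetUp(ksp);                                <-- inner PC built; the shift type is conveyed to it
*/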
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscBool    iascii,isstring;
  PetscViewer  subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!red->psubcomm) {
      PetscViewerASCIIPrintf(viewer," Not yet setup\n");
    } else {
      PetscViewerASCIIPrintf(viewer," First (color=0) of %D PCs follows\n",red->nsubcomm);
      PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscViewerASCIIPushTab(subviewer);
        KSPView(red->ksp,subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
    }
  } else if (isstring) {
    PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
  }
  return(0);
}
#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscInt     mstart,mend,mlocal,M;
  PetscMPIInt  size;
  MPI_Comm     comm,subcomm;
  Vec          x;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* if the pmat set by the user is sequential, we do not need to gather the parallel matrix */
  MPI_Comm_size(comm,&size);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PCRedundantGetKSP(pc,&ksp);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);

      MPI_Comm_size(subcomm,&size);
      if (size > 1) {
        PetscBool foundpack;
        MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);
        if (!foundpack) { /* reset default ksp and pc */
          KSPSetType(red->ksp,KSPGMRES);
          PCSetType(red->pc,PCBJACOBI);
        } else {
          PCFactorSetMatSolverType(red->pc,NULL);
        }
      }

      KSPSetOperators(red->ksp,red->pmats,red->pmats);

      /* get working vectors xsub and ysub */
      MatCreateVecs(red->pmats,&red->xsub,&red->ysub);

      /* create working vectors xdup and ydup.
         xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
         ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
         Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      MatGetLocalSize(red->pmats,&mloc_sub,NULL);
      VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);
      VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        MatCreateVecs(pc->pmat,&x,0);
        VecGetSize(x,&M);
        VecGetOwnershipRange(x,&mstart,&mend);
        mlocal = mend - mstart;
        PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);
        j = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);
        VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);
        ISDestroy(&is1);
        ISDestroy(&is2);

        /* The implementation below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*M,1,&is1);
        ISCreateStride(comm,mlocal,mstart,1,&is2);
        VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);
        ISDestroy(&is1);
        ISDestroy(&is2);
        PetscFree2(idx1,idx2);
        VecDestroy(&x);
      }
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        MatDestroy(&red->pmats);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);
      KSPSetOperators(red->ksp,red->pmats,red->pmats);
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  }

  if (pc->setfromoptionscalled) {
    KSPSetFromOptions(red->ksp);
  }
  KSPSetUp(red->ksp);
  return(0);
}
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscScalar  *array;

  if (!red->useparallelmat) {
    KSPSolve(red->ksp,x,y);
    KSPCheckSolve(red->ksp,pc,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each process */
  KSPSolve(red->ksp,red->xsub,red->ysub);
  KSPCheckSolve(red->ksp,pc,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscScalar  *array;

  if (!red->useparallelmat) {
    KSPSolveTranspose(red->ksp,x,y);
    KSPCheckSolve(red->ksp,pc,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each process */
  KSPSolveTranspose(red->ksp,red->xsub,red->ysub);
  KSPCheckSolve(red->ksp,pc,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (red->useparallelmat) {
    VecScatterDestroy(&red->scatterin);
    VecScatterDestroy(&red->scatterout);
    VecDestroy(&red->ysub);
    VecDestroy(&red->xsub);
    VecDestroy(&red->xdup);
    VecDestroy(&red->ydup);
  }
  MatDestroy(&red->pmats);
  KSPReset(red->ksp);
  return(0);
}
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PCReset_Redundant(pc);
  KSPDestroy(&red->ksp);
  PetscSubcommDestroy(&red->psubcomm);
  PetscFree(pc->data);
  return(0);
}
static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscOptionsHead(PetscOptionsObject,"Redundant options");
  PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);
  PetscOptionsTail();
  return(0);
}
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  red->nsubcomm = nreds;
  return(0);
}
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"num of redundant pc %D must be positive",nredundant);
  PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));
  return(0);
}
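/*
   A minimal usage sketch (not part of this file): selecting the number of redundant solves from
   code instead of the -pc_redundant_number option.  "ksp" is assumed to be a KSP created on
   PETSC_COMM_WORLD with its operators already set; on 64 processes this configuration performs
   4 parallel solves, each on 16 processes.

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);
     KSPSetFromOptions(ksp);
*/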
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscObjectReference((PetscObject)in);
  VecScatterDestroy(&red->scatterin);

  red->scatterin = in;

  PetscObjectReference((PetscObject)out);
  VecScatterDestroy(&red->scatterout);
  red->scatterout = out;
  return(0);
}
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));
  return(0);
}
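/*
   A minimal usage sketch (not part of this file): supplying application-built scatters instead of
   the ones PCSetUp_Redundant() would create.  The scatters "sin" and "sout" are hypothetical and
   assumed to have been created by the caller with VecScatterCreate() so that "sin" moves a global
   vector into the duplicated layout expected by the redundant solves and "sout" moves the result
   back; "pc" is a PCREDUNDANT preconditioner.

     VecScatter sin,sout;                  hypothetical, created by the application
     PCRedundantSetScatter(pc,sin,sout);   the PC takes its own references to both scatters
*/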
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  MPI_Comm     comm,subcomm;
  const char   *prefix;

  if (!red->psubcomm) {
    PCGetOptionsPrefix(pc,&prefix);

    PetscObjectGetComm((PetscObject)pc,&comm);
    PetscSubcommCreate(comm,&red->psubcomm);
    PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);
    PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);

    PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);
    PetscSubcommSetFromOptions(red->psubcomm);
    PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));

    /* create a new KSP and PC of which the processes in each subcomm hold a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    KSPCreate(subcomm,&red->ksp);
    KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
    PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);
    KSPSetType(red->ksp,KSPPREONLY);
    KSPGetPC(red->ksp,&red->pc);
    PCSetType(red->pc,PCLU);
    if (red->shifttypeset) {
      PCFactorSetShiftType(red->pc,red->shifttype);
      red->shifttypeset = PETSC_FALSE;
    }
    KSPSetOptionsPrefix(red->ksp,prefix);
    KSPAppendOptionsPrefix(red->ksp,"redundant_");
  }
  *innerksp = red->ksp;
  return(0);
}
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));
  return(0);
}
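/*
   A minimal usage sketch (not part of this file): retrieving the inner, less parallel KSP so its
   solver and preconditioner can be customized from code; "pc" is assumed to be a PCREDUNDANT
   preconditioner obtained with KSPGetPC().

     KSP innerksp;
     PC  innerpc;
     PCRedundantGetKSP(pc,&innerksp);
     KSPSetType(innerksp,KSPGMRES);
     KSPSetTolerances(innerksp,1.e-8,PETSC_DEFAULT,PETSC_DEFAULT,100);
     KSPGetPC(innerksp,&innerpc);
     PCSetType(innerpc,PCBJACOBI);
*/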
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  return(0);
}
/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));
  return(0);
}
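/*
   A minimal usage sketch (not part of this file): inspecting the gathered operator that each
   subgroup factors.  The matrix lives on the subcommunicator, so it is viewed with a viewer on
   that communicator; "pc" is assumed to be a PCREDUNDANT preconditioner that has already been
   set up.

     Mat      redmat;
     MPI_Comm subcomm;
     PCRedundantGetOperators(pc,&redmat,NULL);
     PetscObjectGetComm((PetscObject)redmat,&subcomm);
     MatView(redmat,PETSC_VIEWER_STDOUT_(subcomm));
*/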
/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioner can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx

     Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

     Level: intermediate

     Notes:
     The default KSP is preonly and the default PC is LU.

     PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

     Developer Notes:
     Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
          PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
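/*
   A minimal, self-contained usage sketch (not part of the PETSc distribution): assemble a small
   parallel operator, select PCREDUNDANT, and solve.  The 1-D Laplacian below is illustrative only;
   typical runs also pass options such as -pc_redundant_number 4 -redundant_pc_type lu.

     #include <petscksp.h>

     int main(int argc,char **argv)
     {
       Mat      A;
       Vec      x,b;
       KSP      ksp;
       PC       pc;
       PetscInt i,rstart,rend,n = 100;

       PetscInitialize(&argc,&argv,NULL,NULL);
       MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,3,NULL,1,NULL,&A);
       MatGetOwnershipRange(A,&rstart,&rend);
       for (i=rstart; i<rend; i++) {
         if (i>0)   MatSetValue(A,i,i-1,-1.0,INSERT_VALUES);
         if (i<n-1) MatSetValue(A,i,i+1,-1.0,INSERT_VALUES);
         MatSetValue(A,i,i,2.0,INSERT_VALUES);
       }
       MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
       MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
       MatCreateVecs(A,&x,&b);
       VecSet(b,1.0);

       KSPCreate(PETSC_COMM_WORLD,&ksp);
       KSPSetOperators(ksp,A,A);
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCREDUNDANT);
       KSPSetFromOptions(ksp);
       KSPSolve(ksp,b,x);

       KSPDestroy(&ksp);
       VecDestroy(&x);
       VecDestroy(&b);
       MatDestroy(&A);
       PetscFinalize();
       return 0;
     }
*/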
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt  size;

  PetscNewLog(pc,&red);
  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);
  return(0);
}