/*
   redundant.c  (petsc-3.7.3, 2016-08-01)

   This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/

#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of PetscSubcomm data structures, i.e. number of redundant solves */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
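
/*
   Orientation note (an added editorial sketch, not part of the original comments): PCApply_Redundant() below
   moves data as

      x  --scatterin-->  xdup  --VecPlaceArray-->  xsub  --KSPSolve-->  ysub  --VecPlaceArray-->  ydup  --scatterout-->  y

   where x,y live on PetscObjectComm((PetscObject)pc), xdup,ydup live on the contiguous parent communicator of
   psubcomm (holding one concatenated copy per subcommunicator), and xsub,ysub live on a single subcommunicator.
*/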

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (red->ksp) {
    PC pc;
    KSPGetPC(red->ksp,&pc);
    PCFactorSetShiftType(pc,shifttype);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  return(0);
}

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscBool    iascii,isstring;
  PetscViewer  subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!red->psubcomm) {
      PetscViewerASCIIPrintf(viewer," Redundant preconditioner: Not yet setup\n");
    } else {
      PetscViewerASCIIPrintf(viewer," Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);
      PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscViewerASCIIPushTab(subviewer);
        KSPView(red->ksp,subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
    }
  } else if (isstring) {
    PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
  }
  return(0);
}

static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscInt     mstart,mend,mlocal,M;
  PetscMPIInt  size;
  MPI_Comm     comm,subcomm;
  Vec          x;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  MPI_Comm_size(comm,&size);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PCRedundantGetKSP(pc,&ksp);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);

      MPI_Comm_size(subcomm,&size);
      if (size > 1) {
        PetscBool foundpack;
        MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);
        if (!foundpack) { /* no parallel LU factorization package is available; reset the default ksp and pc */
          KSPSetType(red->ksp,KSPGMRES);
          PCSetType(red->pc,PCBJACOBI);
        } else {
          PCFactorSetMatSolverPackage(red->pc,NULL);
        }
      }

      KSPSetOperators(red->ksp,red->pmats,red->pmats);

      /* get working vectors xsub and ysub */
      MatCreateVecs(red->pmats,&red->xsub,&red->ysub);

      /* create working vectors xdup and ydup.
         xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
         ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
         Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      MatGetLocalSize(red->pmats,&mloc_sub,NULL);
      VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);
      VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        MatCreateVecs(pc->pmat,&x,0);
        VecGetSize(x,&M);
        VecGetOwnershipRange(x,&mstart,&mend);
        mlocal = mend - mstart;
        PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);
        j = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
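        /* Illustrative example (editorial note, not in the original source): with M = 8, red->psubcomm->n = 2,
           and a process owning rows 3..5 of x (mstart = 3, mend = 6), the loop above builds
               idx1 = {3,4,5, 3,4,5}        (its entries of x, repeated once per subcommunicator)
               idx2 = {3,4,5, 11,12,13}     (the matching slots in xdup; copy k starts at offset k*M)
           so scatterin sends each local piece of x to every redundant copy concatenated inside xdup. */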
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);
        VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);
        ISDestroy(&is1);
        ISDestroy(&is2);

        /* The implementation below works for both PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcommunicator) */
        ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*M,1,&is1);
        ISCreateStride(comm,mlocal,mstart,1,&is2);
        VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);
        ISDestroy(&is1);
        ISDestroy(&is2);
        PetscFree2(idx1,idx2);
        VecDestroy(&x);
      }
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        MatDestroy(&red->pmats);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);
      KSPSetOperators(red->ksp,red->pmats,red->pmats);
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  }

  if (pc->setfromoptionscalled) {
    KSPSetFromOptions(red->ksp);
  }
  KSPSetUp(red->ksp);
  return(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscScalar  *array;

  if (!red->useparallelmat) {
    KSPSolve(red->ksp,x,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolve(red->ksp,red->xsub,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscScalar  *array;

  if (!red->useparallelmat) {
    KSPSolveTranspose(red->ksp,x,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolveTranspose(red->ksp,red->xsub,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (red->useparallelmat) {
    VecScatterDestroy(&red->scatterin);
    VecScatterDestroy(&red->scatterout);
    VecDestroy(&red->ysub);
    VecDestroy(&red->xsub);
    VecDestroy(&red->xdup);
    VecDestroy(&red->ydup);
  }
  MatDestroy(&red->pmats);
  KSPReset(red->ksp);
  return(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PCReset_Redundant(pc);
  KSPDestroy(&red->ksp);
  PetscSubcommDestroy(&red->psubcomm);
  PetscFree(pc->data);
  return(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscOptionsHead(PetscOptionsObject,"Redundant options");
  PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);
  PetscOptionsTail();
  return(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  red->nsubcomm = nreds;
  return(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.
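
   Example usage (a minimal sketch; assumes pc has already been created, e.g. obtained with KSPGetPC(), before being set to PCREDUNDANT):
.vb
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);
.ve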

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));
  return(0);
}

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscObjectReference((PetscObject)in);
  VecScatterDestroy(&red->scatterin);

  red->scatterin = in;

  PetscObjectReference((PetscObject)out);
  VecScatterDestroy(&red->scatterout);
  red->scatterout = out;
  return(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out
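
   Example usage (a hedged sketch; the vectors and index sets used to build the two scatters are assumed to have been
   constructed elsewhere so that they match the layout PCREDUNDANT expects):
.vb
     VecScatterCreate(x,is1,xdup,is2,&in);
     VecScatterCreate(xdup,is3,x,is4,&out);
     PCRedundantSetScatter(pc,in,out);
.ve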

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));
  return(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  MPI_Comm     comm,subcomm;
  const char   *prefix;

  if (!red->psubcomm) {
    PCGetOptionsPrefix(pc,&prefix);

    PetscObjectGetComm((PetscObject)pc,&comm);
    PetscSubcommCreate(comm,&red->psubcomm);
    PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);
    PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);

    PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);
    PetscSubcommSetFromOptions(red->psubcomm);
    PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));

    /* create a new KSP (and PC) of which the processes in each subcommunicator hold a redundant copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    KSPCreate(subcomm,&red->ksp);
    KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
    PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);
    KSPSetType(red->ksp,KSPPREONLY);
    KSPGetPC(red->ksp,&red->pc);
    PCSetType(red->pc,PCLU);
    if (red->shifttypeset) {
      PCFactorSetShiftType(red->pc,red->shifttype);
      red->shifttypeset = PETSC_FALSE;
    }
    KSPSetOptionsPrefix(red->ksp,prefix);
    KSPAppendOptionsPrefix(red->ksp,"redundant_");
  }
  *innerksp = red->ksp;
  return(0);
}

/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes
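
   Example usage (a minimal sketch; switches the inner redundant solve from the default LU to Cholesky):
.vb
     KSP innerksp;
     PC  innerpc;
     PCRedundantGetKSP(pc,&innerksp);
     KSPGetPC(innerksp,&innerpc);
     PCSetType(innerpc,PCCHOLESKY);
.ve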

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));
  return(0);
}

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  return(0);
}

/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix
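
   Example usage (a minimal sketch; inspects the matrix each subcommunicator solves with, after PCSetUp() has been called):
.vb
     Mat      A,P;
     PetscInt m,n;
     PCRedundantGetOperators(pc,&A,&P);
     MatGetSize(A,&m,&n);
.ve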

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));
  return(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.
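
   Example usage (a minimal sketch; the KSP object ksp holding the linear system is assumed to have been created elsewhere):
.vb
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);
.ve
   or, equivalently, from the command line: -pc_type redundant -pc_redundant_number 4 -redundant_pc_type lu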

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
          PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt  size;

  PetscNewLog(pc,&red);
  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);
  return(0);
}