Actual source code: redundant.c
petsc-3.8.4 2018-03-24

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vector that congregates xsub or ysub, facilitating vector scattering */
  Mat                pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of PetscSubcomm data structures */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (red->ksp) {
    PC pc;
    KSPGetPC(red->ksp,&pc);
    PCFactorSetShiftType(pc,shifttype);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  return(0);
}
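/* Editor's sketch (not part of the original source): how the deferred shift type
   above is exercised from user code; `pc` is assumed to already be set to
   PCREDUNDANT. If the inner KSP does not exist yet, the value is stored and
   replayed later in PCRedundantGetKSP_Redundant() below.

     PCSetType(pc,PCREDUNDANT);
     PCFactorSetShiftType(pc,MAT_SHIFT_NONZERO);
*/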
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscBool    iascii,isstring;
  PetscViewer  subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!red->psubcomm) {
      PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");
    } else {
      PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);
      PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscViewerASCIIPushTab(subviewer);
        KSPView(red->ksp,subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
    }
  } else if (isstring) {
    PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
  }
  return(0);
}
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscInt     mstart,mend,mlocal,M;
  PetscMPIInt  size;
  MPI_Comm     comm,subcomm;
  Vec          x;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* if the pmatrix set by the user is sequential then we do not need to gather the parallel matrix */
  MPI_Comm_size(comm,&size);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PCRedundantGetKSP(pc,&ksp);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it onto the processors of a subcommunicator */
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);

      MPI_Comm_size(subcomm,&size);
      if (size > 1) {
        PetscBool foundpack;
        MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);
        if (!foundpack) { /* reset the default ksp and pc */
          KSPSetType(red->ksp,KSPGMRES);
          PCSetType(red->pc,PCBJACOBI);
        } else {
          PCFactorSetMatSolverPackage(red->pc,NULL);
        }
      }

      KSPSetOperators(red->ksp,red->pmats,red->pmats);

      /* get working vectors xsub and ysub */
      MatCreateVecs(red->pmats,&red->xsub,&red->ysub);

      /* create working vectors xdup and ydup.
         xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
         ydup concatenates all ysub and has empty local arrays because ysub's arrays will be placed into it.
         Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      MatGetLocalSize(red->pmats,&mloc_sub,NULL);
      VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);
      VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        MatCreateVecs(pc->pmat,&x,NULL);
        VecGetSize(x,&M);
        VecGetOwnershipRange(x,&mstart,&mend);
        mlocal = mend - mstart;
        PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);
        j = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
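        /* Editor's note (worked example, not in the original source): with global
           size M = 4, two processes each owning two entries, and n = 2 subcomms,
           rank 0 (mstart = 0, mend = 2) builds
             idx1 = {0,1, 0,1}   (its entries of x, once per subcomm)
             idx2 = {0,1, 4,5}   (their positions in xdup, offset by M per copy)
           so scatterin replicates every entry of x into each of the n stacked
           copies inside xdup. */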
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);
        VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);
        ISDestroy(&is1);
        ISDestroy(&is2);

        /* The implementation below works for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*M,1,&is1);
        ISCreateStride(comm,mlocal,mstart,1,&is2);
        VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);
        ISDestroy(&is1);
        ISDestroy(&is2);
        PetscFree2(idx1,idx2);
        VecDestroy(&x);
      }
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it onto the processors of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        MatDestroy(&red->pmats);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);
      KSPSetOperators(red->ksp,red->pmats,red->pmats);
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  }

  if (pc->setfromoptionscalled) {
    KSPSetFromOptions(red->ksp);
  }
  KSPSetUp(red->ksp);
  return(0);
}
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscScalar  *array;

  if (!red->useparallelmat) {
    KSPSolve(red->ksp,x,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolve(red->ksp,red->xsub,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}
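/* Editor's note (sketch, not in the original source): data flow of one apply
   with nsubcomm = 2 and global size M:

     x (comm, size M)  --scatterin-->  xdup (dupcomm, size 2*M, two stacked copies)
     xdup's local block aliased into xsub via VecPlaceArray() (no copy)
     each subcomm solves: ysub = pmats^{-1} xsub
     ysub aliased into ydup, then  --scatterout-->  y (comm, size M),

   where each process pulls its piece of y from the copy owned by its own
   subcomm. */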
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  PetscScalar  *array;

  if (!red->useparallelmat) {
    KSPSolveTranspose(red->ksp,x,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolveTranspose(red->ksp,red->xsub,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (red->useparallelmat) {
    VecScatterDestroy(&red->scatterin);
    VecScatterDestroy(&red->scatterout);
    VecDestroy(&red->ysub);
    VecDestroy(&red->xsub);
    VecDestroy(&red->xdup);
    VecDestroy(&red->ydup);
  }
  MatDestroy(&red->pmats);
  KSPReset(red->ksp);
  return(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PCReset_Redundant(pc);
  KSPDestroy(&red->ksp);
  PetscSubcommDestroy(&red->psubcomm);
  PetscFree(pc->data);
  return(0);
}
static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscOptionsHead(PetscOptionsObject,"Redundant options");
  PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,NULL);
  PetscOptionsTail();
  return(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  red->nsubcomm = nreds;
  return(0);
}
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                nredundant is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Number of redundant pc %D must be positive",nredundant);
  PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));
  return(0);
}
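/* Editor's sketch (not in the original source): typical call sequence, assuming
   a KSP `ksp` running on 64 processes:

     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);   equivalent to -pc_redundant_number 4
*/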
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscObjectReference((PetscObject)in);
  VecScatterDestroy(&red->scatterin);
  red->scatterin = in;

  PetscObjectReference((PetscObject)out);
  VecScatterDestroy(&red->scatterout);
  red->scatterout = out;
  return(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));
  return(0);
}
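/* Editor's sketch (not in the original source): supplying custom scatters.
   Here `x` is a global vector, `xdup` the duplicated vector, and is1..is4 are
   hypothetical index sets describing the mapping (compare the default
   construction in PCSetUp_Redundant() above):

     VecScatterCreate(x,is1,xdup,is2,&in);
     VecScatterCreate(xdup,is3,x,is4,&out);
     PCRedundantSetScatter(pc,in,out);
*/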
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  MPI_Comm     comm,subcomm;
  const char   *prefix;

  if (!red->psubcomm) {
    PCGetOptionsPrefix(pc,&prefix);

    PetscObjectGetComm((PetscObject)pc,&comm);
    PetscSubcommCreate(comm,&red->psubcomm);
    PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);
    PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);

    PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);
    PetscSubcommSetFromOptions(red->psubcomm);
    PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));

    /* create a new PC that the processors in each subcomm have a copy of */
    subcomm = PetscSubcommChild(red->psubcomm);

    KSPCreate(subcomm,&red->ksp);
    KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
    PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);
    KSPSetType(red->ksp,KSPPREONLY);
    KSPGetPC(red->ksp,&red->pc);
    PCSetType(red->pc,PCLU);
    if (red->shifttypeset) {
      PCFactorSetShiftType(red->pc,red->shifttype);
      red->shifttypeset = PETSC_FALSE;
    }
    KSPSetOptionsPrefix(red->ksp,prefix);
    KSPAppendOptionsPrefix(red->ksp,"redundant_");
  }
  *innerksp = red->ksp;
  return(0);
}

/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on a smaller set of processes than the original PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));
  return(0);
}
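/* Editor's sketch (not in the original source): customizing the inner solver
   from code rather than the options database:

     KSP inner;
     PCRedundantGetKSP(pc,&inner);
     KSPSetType(inner,KSPGMRES);   replaces the default preonly+LU

   The options-database equivalent uses the "redundant_" prefix appended above,
   e.g. -redundant_ksp_type gmres -redundant_pc_type bjacobi.
*/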
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  return(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));
  return(0);
}
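/* Editor's sketch (not in the original source): inspecting the redundant
   operator after PCSetUp(); note that both outputs currently alias the same
   matrix (red->pmats):

     Mat A_red,P_red;
     PCRedundantGetOperators(pc,&A_red,&P_red);
*/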
/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set using -redundant_pc_xxx, and for the redundant KSP using -redundant_ksp_xxx

   Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              n is 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   PCFactorSetShiftType() applied to this PC will convey the shift type to the inner PC if it is factorization based.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
          PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt  size;

  PetscNewLog(pc,&red);
  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);
  return(0);
}
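/* Editor's sketch (not in the original source): a minimal driver using
   PCREDUNDANT; the Mat A and Vecs b,x are assumed to be created and filled
   elsewhere. All calls below are standard PETSc API.

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);   four redundant solves
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/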