Actual source code: precon.c
petsc-3.14.6 2021-03-30
2: /*
3: The PC (preconditioner) interface routines, callable by users.
4: */
5: #include <petsc/private/pcimpl.h>
6: #include <petscdm.h>
8: /* Logging support */
9: PetscClassId PC_CLASSID;
10: PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11: PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
12: PetscInt PetscMGLevelId;
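/* Chooses a default PC type from the attached matrix: ICC or ILU in serial when a PETSc
   factorization is available, block Jacobi in parallel, and PCNONE as the fallback */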
14: PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
15: {
17: PetscMPIInt size;
18: PetscBool hasop,flg1,flg2,set,flg3;
21: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
22: if (pc->pmat) {
23: MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);
24: if (size == 1) {
25: MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
26: MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
27: MatIsSymmetricKnown(pc->pmat,&set,&flg3);
28: if (flg1 && (!flg2 || (set && flg3))) {
29: *type = PCICC;
30: } else if (flg2) {
31: *type = PCILU;
32: } else if (hasop) { /* likely is a parallel matrix run on one processor */
33: *type = PCBJACOBI;
34: } else {
35: *type = PCNONE;
36: }
37: } else {
38: if (hasop) {
39: *type = PCBJACOBI;
40: } else {
41: *type = PCNONE;
42: }
43: }
44: } else {
45: if (size == 1) {
46: *type = PCILU;
47: } else {
48: *type = PCBJACOBI;
49: }
50: }
51: return(0);
52: }
54: /*@
55: PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
57: Collective on PC
59: Input Parameter:
60: . pc - the preconditioner context
62: Level: developer
64: Notes:
65: This allows a PC to be reused for a linear system of a different size while keeping the options previously set in the PC
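   A minimal usage sketch (not from the original source; assumes A2 is a new, differently sized operator):
.vb
      PCSetOperators(pc,A1,A1);
      PCSetUp(pc);
      /* ... solve ... */
      PCReset(pc);               /* frees the PC's Vecs and Mats, keeps the options */
      PCSetOperators(pc,A2,A2);  /* attach the new, differently sized system */
      PCSetUp(pc);
.ve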
67: .seealso: PCCreate(), PCSetUp()
68: @*/
69: PetscErrorCode PCReset(PC pc)
70: {
75: if (pc->ops->reset) {
76: (*pc->ops->reset)(pc);
77: }
78: VecDestroy(&pc->diagonalscaleright);
79: VecDestroy(&pc->diagonalscaleleft);
80: MatDestroy(&pc->pmat);
81: MatDestroy(&pc->mat);
83: pc->setupcalled = 0;
84: return(0);
85: }
87: /*@
88: PCDestroy - Destroys PC context that was created with PCCreate().
90: Collective on PC
92: Input Parameter:
93: . pc - the preconditioner context
95: Level: developer
97: .seealso: PCCreate(), PCSetUp()
98: @*/
99: PetscErrorCode PCDestroy(PC *pc)
100: {
104: if (!*pc) return(0);
106: if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; return(0);}
108: PCReset(*pc);
110: /* if memory was published with SAWs then destroy it */
111: PetscObjectSAWsViewOff((PetscObject)*pc);
112: if ((*pc)->ops->destroy) {(*(*pc)->ops->destroy)((*pc));}
113: DMDestroy(&(*pc)->dm);
114: PetscHeaderDestroy(pc);
115: return(0);
116: }
118: /*@C
119: PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
120: scaling as needed by certain time-stepping codes.
122: Logically Collective on PC
124: Input Parameter:
125: . pc - the preconditioner context
127: Output Parameter:
128: . flag - PETSC_TRUE if it applies the scaling
130: Level: developer
132: Notes:
133: If this returns PETSC_TRUE then the system solved via the Krylov method is
134: $ D M A D^{-1} y = D M b for left preconditioning or
135: $ D A M D^{-1} z = D b for right preconditioning
137: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
138: @*/
139: PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
140: {
144: *flag = pc->diagonalscale;
145: return(0);
146: }
148: /*@
149: PCSetDiagonalScale - Sets the left scaling vector used to apply an additional left and right
150: scaling as needed by certain time-stepping codes.
152: Logically Collective on PC
154: Input Parameters:
155: + pc - the preconditioner context
156: - s - scaling vector
158: Level: intermediate
160: Notes:
161: The system solved via the Krylov method is
162: $ D M A D^{-1} y = D M b for left preconditioning or
163: $ D A M D^{-1} z = D b for right preconditioning
165: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
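   A hedged sketch of the resulting scalings (assumes s is a Vec holding the diagonal of D):
.vb
      PCSetDiagonalScale(pc,s);      /* stores D = diag(s) and computes D^{-1} */
      PCDiagonalScaleLeft(pc,x,y);   /* y = D x */
      PCDiagonalScaleRight(pc,x,y);  /* y = D^{-1} x */
.ve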
167: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
168: @*/
169: PetscErrorCode PCSetDiagonalScale(PC pc,Vec s)
170: {
176: pc->diagonalscale = PETSC_TRUE;
178: PetscObjectReference((PetscObject)s);
179: VecDestroy(&pc->diagonalscaleleft);
181: pc->diagonalscaleleft = s;
183: VecDuplicate(s,&pc->diagonalscaleright);
184: VecCopy(s,pc->diagonalscaleright);
185: VecReciprocal(pc->diagonalscaleright);
186: return(0);
187: }
189: /*@
190: PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
192: Logically Collective on PC
194: Input Parameters:
195: + pc - the preconditioner context
196: . in - input vector
197: - out - scaled vector (may be the same as in)
199: Level: intermediate
201: Notes:
202: The system solved via the Krylov method is
203: $ D M A D^{-1} y = D M b for left preconditioning or
204: $ D A M D^{-1} z = D b for right preconditioning
206: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
208: If diagonal scaling is turned off and in is not out, then in is copied to out.
210: .seealso: PCCreate(), PCSetUp(), PCSetDiagonalScale(), PCDiagonalScaleRight(), PCGetDiagonalScale()
211: @*/
212: PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
213: {
220: if (pc->diagonalscale) {
221: VecPointwiseMult(out,pc->diagonalscaleleft,in);
222: } else if (in != out) {
223: VecCopy(in,out);
224: }
225: return(0);
226: }
228: /*@
229: PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
231: Logically Collective on PC
233: Input Parameters:
234: + pc - the preconditioner context
235: . in - input vector
236: - out - scaled vector (may be the same as in)
238: Level: intermediate
240: Notes:
241: The system solved via the Krylov method is
242: $ D M A D^{-1} y = D M b for left preconditioning or
243: $ D A M D^{-1} z = D b for right preconditioning
245: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
247: If diagonal scaling is turned off and in is not out, then in is copied to out.
249: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCSetDiagonalScale(), PCGetDiagonalScale()
250: @*/
251: PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
252: {
259: if (pc->diagonalscale) {
260: VecPointwiseMult(out,pc->diagonalscaleright,in);
261: } else if (in != out) {
262: VecCopy(in,out);
263: }
264: return(0);
265: }
267: /*@
268: PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
269: operator during the preconditioning process, it applies the Amat provided to TSSetRHSJacobian(),
270: TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.
272: Logically Collective on PC
274: Input Parameters:
275: + pc - the preconditioner context
276: - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
278: Options Database Key:
279: . -pc_use_amat <true,false>
281: Notes:
282: For the common case in which the linear system matrix and the matrix used to construct the
283: preconditioner are identical, this routine does nothing.
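   For example, with PCMG one may wish to apply the true operator Amat inside the
   multigrid cycle while the Pmat is used only to build the smoothers; a hedged sketch:
.vb
      KSPSetOperators(ksp,Amat,Pmat);
      KSPGetPC(ksp,&pc);
      PCSetUseAmat(pc,PETSC_TRUE);   /* residual computations inside the PC use Amat */
.ve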
285: Level: intermediate
287: .seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
288: @*/
289: PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg)
290: {
293: pc->useAmat = flg;
294: return(0);
295: }
297: /*@
298: PCSetErrorIfFailure - Causes PC to generate an error if an FPE, for example a zero pivot, is detected.
300: Logically Collective on PC
302: Input Parameters:
303: + pc - iterative context obtained from PCCreate()
304: - flg - PETSC_TRUE indicates you want the error generated
306: Level: advanced
308: Notes:
309: Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner; you can call KSPGetConvergedReason() after a KSPSolve()
310: to determine if it has converged or failed. Alternatively, use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
311: detected.
313: This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
315: .seealso: PCGetFailedReason(), KSPSetErrorIfNotConverged()
316: @*/
317: PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg)
318: {
322: pc->erroriffailure = flg;
323: return(0);
324: }
326: /*@
327: PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
328: operator during the preconditioning process, it applies the Amat provided to TSSetRHSJacobian(),
329: TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.
331: Logically Collective on PC
333: Input Parameter:
334: . pc - the preconditioner context
336: Output Parameter:
337: . flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
339: Notes:
340: For the common case in which the linear system matrix and the matrix used to construct the
341: preconditioner are identical, this routine does nothing.
343: Level: intermediate
345: .seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
346: @*/
347: PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg)
348: {
351: *flg = pc->useAmat;
352: return(0);
353: }
355: /*@
356: PCCreate - Creates a preconditioner context.
358: Collective
360: Input Parameter:
361: . comm - MPI communicator
363: Output Parameter:
364: . pc - location to put the preconditioner context
366: Notes:
367: The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi (PCBJACOBI) with PCILU or PCICC
368: on the blocks in parallel. For dense matrices it is always PCNONE.
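   A typical creation sequence (a sketch with error checking omitted, as in the
   other samples in this file):
.vb
      PCCreate(PETSC_COMM_WORLD,&pc);
      PCSetType(pc,PCJACOBI);
      PCSetOperators(pc,A,A);
      PCSetUp(pc);
      PCApply(pc,x,y);
      PCDestroy(&pc);
.ve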
370: Level: developer
372: .seealso: PCSetUp(), PCApply(), PCDestroy()
373: @*/
374: PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc)
375: {
376: PC pc;
381: *newpc = NULL;
382: PCInitializePackage();
384: PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);
386: pc->mat = NULL;
387: pc->pmat = NULL;
388: pc->setupcalled = 0;
389: pc->setfromoptionscalled = 0;
390: pc->data = NULL;
391: pc->diagonalscale = PETSC_FALSE;
392: pc->diagonalscaleleft = NULL;
393: pc->diagonalscaleright = NULL;
395: pc->modifysubmatrices = NULL;
396: pc->modifysubmatricesP = NULL;
398: *newpc = pc;
399: return(0);
401: }
403: /* -------------------------------------------------------------------------------*/
405: /*@
406: PCApply - Applies the preconditioner to a vector.
408: Collective on PC
410: Input Parameters:
411: + pc - the preconditioner context
412: - x - input vector
414: Output Parameter:
415: . y - output vector
417: Level: developer
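   Usage sketch (x and y must be different Vecs, sized compatibly with the operators):
.vb
      VecDuplicate(x,&y);
      PCApply(pc,x,y);   /* y = B x, where B approximates the action of A^{-1} */
.ve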
419: .seealso: PCApplyTranspose(), PCApplyBAorAB()
420: @*/
421: PetscErrorCode PCApply(PC pc,Vec x,Vec y)
422: {
424: PetscInt m,n,mv,nv;
430: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
431: if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
432: /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
433: MatGetLocalSize(pc->pmat,&m,&n);
434: VecGetLocalSize(x,&nv);
435: VecGetLocalSize(y,&mv);
436: if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal resulting vector number of rows %D",m,mv);
437: if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal input vector number of rows %D",n,nv);
438: VecSetErrorIfLocked(y,3);
440: PCSetUp(pc);
441: if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
442: VecLockReadPush(x);
443: PetscLogEventBegin(PC_Apply,pc,x,y,0);
444: (*pc->ops->apply)(pc,x,y);
445: PetscLogEventEnd(PC_Apply,pc,x,y,0);
446: if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
447: VecLockReadPop(x);
448: return(0);
449: }
451: /*@
452: PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.
454: Collective on PC
456: Input Parameters:
457: + pc - the preconditioner context
458: - X - block of input vectors
460: Output Parameter:
461: . Y - block of output vectors
463: Level: developer
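   A hedged sketch (assumes pc is configured for a square M x M operator with m local rows,
   comm is the PC's communicator, and N right-hand sides are treated at once):
.vb
      MatCreateDense(comm,m,PETSC_DECIDE,M,N,NULL,&X);
      MatCreateDense(comm,m,PETSC_DECIDE,M,N,NULL,&Y);
      /* fill the columns of X with the right-hand sides */
      PCMatApply(pc,X,Y);   /* Y = B X; falls back to column-by-column PCApply() */
      MatDestroy(&Y);
      MatDestroy(&X);
.ve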
465: .seealso: PCApply(), KSPMatSolve()
466: @*/
467: PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y)
468: {
469: Mat A;
470: Vec cy, cx;
471: PetscInt m1, M1, m2, M2, n1, N1, n2, N2;
472: PetscBool match;
481: if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
482: PCGetOperators(pc, NULL, &A);
483: MatGetLocalSize(A, &m1, NULL);
484: MatGetLocalSize(X, &m2, &n2);
485: MatGetSize(A, &M1, NULL);
486: MatGetSize(X, &M2, &N2);
487: if (m1 != m2 || M1 != M2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot use a block of input vectors with (m2,M2) = (%D,%D) for a preconditioner with (m1,M1) = (%D,%D)", m2, M2, m1, M1);
488: MatGetLocalSize(Y, &m1, &n1);
489: MatGetSize(Y, &M1, &N1);
490: if (m1 != m2 || M1 != M2 || n1 != n2 || N1 != N2) SETERRQ8(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible block of input vectors (m2,M2)x(n2,N2) = (%D,%D)x(%D,%D) and output vectors (m1,M1)x(n1,N1) = (%D,%D)x(%D,%D)", m2, M2, n2, N2, m1, M1, n1, N1);
491: PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");
492: if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
493: PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");
494: if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
495: PCSetUp(pc);
496: if (pc->ops->matapply) {
497: PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);
498: (*pc->ops->matapply)(pc, X, Y);
499: PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);
500: } else {
501: PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);
502: for (n2 = 0; n2 < N2; ++n2) {
503: MatDenseGetColumnVecRead(X, n2, &cx);
504: MatDenseGetColumnVecWrite(Y, n2, &cy);
505: PCApply(pc, cx, cy);
506: MatDenseRestoreColumnVecWrite(Y, n2, &cy);
507: MatDenseRestoreColumnVecRead(X, n2, &cx);
508: }
509: }
510: return(0);
511: }
513: /*@
514: PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
516: Collective on PC
518: Input Parameters:
519: + pc - the preconditioner context
520: - x - input vector
522: Output Parameter:
523: . y - output vector
525: Notes:
526: Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
528: Level: developer
530: .seealso: PCApply(), PCApplySymmetricRight()
531: @*/
532: PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
533: {
540: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
541: if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
542: PCSetUp(pc);
543: if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
544: VecLockReadPush(x);
545: PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
546: (*pc->ops->applysymmetricleft)(pc,x,y);
547: PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
548: VecLockReadPop(x);
549: if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
550: return(0);
551: }
553: /*@
554: PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
556: Collective on PC
558: Input Parameters:
559: + pc - the preconditioner context
560: - x - input vector
562: Output Parameter:
563: . y - output vector
565: Level: developer
567: Notes:
568: Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
570: .seealso: PCApply(), PCApplySymmetricLeft()
571: @*/
572: PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y)
573: {
580: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
581: if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
582: PCSetUp(pc);
583: if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have right symmetric apply");
584: VecLockReadPush(x);
585: PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
586: (*pc->ops->applysymmetricright)(pc,x,y);
587: PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
588: VecLockReadPop(x);
589: if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
590: return(0);
591: }
593: /*@
594: PCApplyTranspose - Applies the transpose of the preconditioner to a vector.
596: Collective on PC
598: Input Parameters:
599: + pc - the preconditioner context
600: - x - input vector
602: Output Parameter:
603: . y - output vector
605: Notes:
606: For complex numbers this applies the non-Hermitian transpose.
608: Developer Notes:
609: We need to implement a PCApplyHermitianTranspose()
611: Level: developer
613: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
614: @*/
615: PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
616: {
623: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
624: if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
625: PCSetUp(pc);
626: if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
627: VecLockReadPush(x);
628: PetscLogEventBegin(PC_Apply,pc,x,y,0);
629: (*pc->ops->applytranspose)(pc,x,y);
630: PetscLogEventEnd(PC_Apply,pc,x,y,0);
631: VecLockReadPop(x);
632: if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
633: return(0);
634: }
636: /*@
637: PCApplyTransposeExists - Tests whether the preconditioner has a transpose apply operation
639: Collective on PC
641: Input Parameters:
642: . pc - the preconditioner context
644: Output Parameter:
645: . flg - PETSC_TRUE if a transpose operation is defined
647: Level: developer
649: .seealso: PCApplyTranspose()
650: @*/
651: PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
652: {
656: if (pc->ops->applytranspose) *flg = PETSC_TRUE;
657: else *flg = PETSC_FALSE;
658: return(0);
659: }
661: /*@
662: PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
664: Collective on PC
666: Input Parameters:
667: + pc - the preconditioner context
668: . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
669: . x - input vector
670: - work - work vector
672: Output Parameter:
673: . y - output vector
675: Level: developer
677: Notes:
678: If the PC has had PCSetDiagonalScale() set, then D M A D^{-1} (left preconditioning) or D A M D^{-1} (right preconditioning) is actually applied. Note that the
679: specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
681: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
682: @*/
683: PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
684: {
696: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
697: if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
698: if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
699: if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}
701: PCSetUp(pc);
702: if (pc->diagonalscale) {
703: if (pc->ops->applyBA) {
704: Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
705: VecDuplicate(x,&work2);
706: PCDiagonalScaleRight(pc,x,work2);
707: (*pc->ops->applyBA)(pc,side,work2,y,work);
708: PCDiagonalScaleLeft(pc,y,y);
709: VecDestroy(&work2);
710: } else if (side == PC_RIGHT) {
711: PCDiagonalScaleRight(pc,x,y);
712: PCApply(pc,y,work);
713: MatMult(pc->mat,work,y);
714: PCDiagonalScaleLeft(pc,y,y);
715: } else if (side == PC_LEFT) {
716: PCDiagonalScaleRight(pc,x,y);
717: MatMult(pc->mat,y,work);
718: PCApply(pc,work,y);
719: PCDiagonalScaleLeft(pc,y,y);
720: } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
721: } else {
722: if (pc->ops->applyBA) {
723: (*pc->ops->applyBA)(pc,side,x,y,work);
724: } else if (side == PC_RIGHT) {
725: PCApply(pc,x,work);
726: MatMult(pc->mat,work,y);
727: } else if (side == PC_LEFT) {
728: MatMult(pc->mat,x,work);
729: PCApply(pc,work,y);
730: } else if (side == PC_SYMMETRIC) {
731: /* There's an extra copy here; maybe should provide 2 work vectors instead? */
732: PCApplySymmetricRight(pc,x,work);
733: MatMult(pc->mat,work,y);
734: VecCopy(y,work);
735: PCApplySymmetricLeft(pc,work,y);
736: }
737: }
738: if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
739: return(0);
740: }
742: /*@
743: PCApplyBAorABTranspose - Applies the transpose of the preconditioner
744: and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
745: NOT tr(B*A) = tr(A)*tr(B).
747: Collective on PC
749: Input Parameters:
750: + pc - the preconditioner context
751: . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
752: . x - input vector
753: - work - work vector
755: Output Parameter:
756: . y - output vector
759: Notes:
760: This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
761: defined by B'. This is why it has the seemingly reversed form that computes tr(B) * tr(A)
763: Level: developer
765: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
766: @*/
767: PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
768: {
776: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
777: if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}
778: if (pc->ops->applyBAtranspose) {
779: (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
780: if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
781: return(0);
782: }
783: if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");
785: PCSetUp(pc);
786: if (side == PC_RIGHT) {
787: PCApplyTranspose(pc,x,work);
788: MatMultTranspose(pc->mat,work,y);
789: } else if (side == PC_LEFT) {
790: MatMultTranspose(pc->mat,x,work);
791: PCApplyTranspose(pc,work,y);
792: }
793: /* add support for PC_SYMMETRIC */
794: if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
795: return(0);
796: }
798: /* -------------------------------------------------------------------------------*/
800: /*@
801: PCApplyRichardsonExists - Determines whether a particular preconditioner has a
802: built-in fast application of Richardson's method.
804: Not Collective
806: Input Parameter:
807: . pc - the preconditioner
809: Output Parameter:
810: . exists - PETSC_TRUE or PETSC_FALSE
812: Level: developer
814: .seealso: PCApplyRichardson()
815: @*/
816: PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
817: {
821: if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
822: else *exists = PETSC_FALSE;
823: return(0);
824: }
826: /*@
827: PCApplyRichardson - Applies several steps of Richardson iteration with
828: the particular preconditioner. This routine is usually used by the
829: Krylov solvers and not the application code directly.
831: Collective on PC
833: Input Parameters:
834: + pc - the preconditioner context
835: . b - the right hand side
836: . w - one work vector
837: . rtol - relative decrease in residual norm convergence criteria
838: . abstol - absolute residual norm convergence criteria
839: . dtol - divergence residual norm increase criteria
840: . its - the number of iterations to apply.
841: - guesszero - PETSC_TRUE if y is to be taken as zero on entry, PETSC_FALSE if y contains a nonzero initial guess
843: Output Parameters:
844: + outits - number of iterations actually used (for SOR this always equals its)
845: . reason - the reason the apply terminated
846: - y - the solution (also contains the initial guess if guesszero is PETSC_FALSE)
848: Notes:
849: Most preconditioners do not support this function. Use the command
850: PCApplyRichardsonExists() to determine if one does.
852: Except for the multigrid PC, this routine ignores the convergence tolerances
853: and always runs for the requested number of iterations.
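   A hedged usage sketch (declarations omitted):
.vb
      PCApplyRichardsonExists(pc,&exists);
      if (exists) {
        PCApplyRichardson(pc,b,y,w,rtol,abstol,dtol,its,PETSC_TRUE,&outits,&reason);
      }
.ve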
855: Level: developer
857: .seealso: PCApplyRichardsonExists()
858: @*/
859: PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
860: {
868: if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
869: PCSetUp(pc);
870: if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
871: (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
872: return(0);
873: }
875: /*@
876: PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
878: Logically Collective on PC
880: Input Parameters:
881: + pc - the preconditioner context
882: - reason - the reason it failed
884: Level: advanced
886: .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason
887: @*/
888: PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
889: {
891: pc->failedreason = reason;
892: return(0);
893: }
895: /*@
896: PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail
898: Logically Collective on PC
900: Input Parameter:
901: . pc - the preconditioner context
903: Output Parameter:
904: . reason - the reason it failed
906: Level: advanced
908: Notes: This is the maximum reason over all ranks in the PC communicator. It is only valid after
909: a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(); it is not valid immediately after a PCSetUp()
910: or PCApply(). In that case, use PCGetFailedReasonRank()
912: .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason()
913: @*/
914: PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
915: {
917: if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
918: else *reason = pc->failedreason;
919: return(0);
920: }
922: /*@
923: PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank
925: Not Collective on PC
927: Input Parameter:
928: . pc - the preconditioner context
930: Output Parameter:
931: . reason - the reason it failed
933: Notes:
934: Different ranks may have different reasons or no reason, see PCGetFailedReason()
936: Level: advanced
938: .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason()
939: @*/
940: PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
941: {
943: if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
944: else *reason = pc->failedreason;
945: return(0);
946: }
948: /*
949: a setupcalled of 0 indicates never setup,
950: 1 indicates has been previously setup
951: -1 indicates a PCSetUp() was attempted and failed
952: */
953: /*@
954: PCSetUp - Prepares for the use of a preconditioner.
956: Collective on PC
958: Input Parameter:
959: . pc - the preconditioner context
961: Level: developer
963: .seealso: PCCreate(), PCApply(), PCDestroy()
964: @*/
965: PetscErrorCode PCSetUp(PC pc)
966: {
967: PetscErrorCode ierr;
968: const char *def;
969: PetscObjectState matstate, matnonzerostate;
973: if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");
975: if (pc->setupcalled && pc->reusepreconditioner) {
976: PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");
977: return(0);
978: }
980: PetscObjectStateGet((PetscObject)pc->pmat,&matstate);
981: MatGetNonzeroState(pc->pmat,&matnonzerostate);
982: if (!pc->setupcalled) {
983: PetscInfo(pc,"Setting up PC for first time\n");
984: pc->flag = DIFFERENT_NONZERO_PATTERN;
985: } else if (matstate == pc->matstate) {
986: PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");
987: return(0);
988: } else {
989: if (matnonzerostate > pc->matnonzerostate) {
990: PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
991: pc->flag = DIFFERENT_NONZERO_PATTERN;
992: } else {
993: PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
994: pc->flag = SAME_NONZERO_PATTERN;
995: }
996: }
997: pc->matstate = matstate;
998: pc->matnonzerostate = matnonzerostate;
1000: if (!((PetscObject)pc)->type_name) {
1001: PCGetDefaultType_Private(pc,&def);
1002: PCSetType(pc,def);
1003: }
1005: MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);
1006: MatSetErrorIfFailure(pc->mat,pc->erroriffailure);
1007: PetscLogEventBegin(PC_SetUp,pc,0,0,0);
1008: if (pc->ops->setup) {
1009: (*pc->ops->setup)(pc);
1010: }
1011: PetscLogEventEnd(PC_SetUp,pc,0,0,0);
1012: if (!pc->setupcalled) pc->setupcalled = 1;
1013: return(0);
1014: }
1016: /*@
1017: PCSetUpOnBlocks - Sets up the preconditioner for each block in
1018: the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1019: methods.
1021: Collective on PC
1023: Input Parameters:
1024: . pc - the preconditioner context
1026: Level: developer
1028: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
1029: @*/
1030: PetscErrorCode PCSetUpOnBlocks(PC pc)
1031: {
1036: if (!pc->ops->setuponblocks) return(0);
1037: PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
1038: (*pc->ops->setuponblocks)(pc);
1039: PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
1040: return(0);
1041: }
1043: /*@C
1044: PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1045: submatrices that arise within certain subdomain-based preconditioners.
1046: The basic submatrices are extracted from the preconditioner matrix as
1047: usual; the user can then alter these (for example, to set different boundary
1048: conditions for each submatrix) before they are used for the local solves.
1050: Logically Collective on PC
1052: Input Parameters:
1053: + pc - the preconditioner context
1054: . func - routine for modifying the submatrices
1055: - ctx - optional user-defined context (may be null)
1057: Calling sequence of func:
1058: $ func(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx);
1060: + row - an array of index sets that contain the global row numbers
1061: that comprise each local submatrix
1062: . col - an array of index sets that contain the global column numbers
1063: that comprise each local submatrix
1064: . submat - array of local submatrices
1065: - ctx - optional user-defined context for private data for the
1066: user-defined func routine (may be null)
1068: Notes:
1069: PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1070: KSPSolve().
1072: A routine set by PCSetModifySubMatrices() is currently called within
1073: the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1074: preconditioners. All other preconditioners ignore this routine.
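   A hedged example (MyModify is a hypothetical user routine matching the calling sequence above):
.vb
      PetscErrorCode MyModify(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
      {
        PetscInt i;
        for (i = 0; i < nsub; i++) MatShift(submat[i],1.0); /* e.g., shift each diagonal block */
        return 0;
      }
      ...
      PCSetModifySubMatrices(pc,MyModify,NULL);
.ve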
1076: Level: advanced
1078: .seealso: PCModifySubMatrices()
1079: @*/
1080: PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1081: {
1084: pc->modifysubmatrices = func;
1085: pc->modifysubmatricesP = ctx;
1086: return(0);
1087: }
1089: /*@C
1090: PCModifySubMatrices - Calls an optional user-defined routine within
1091: certain preconditioners if one has been set with PCSetModifySubMatrices().
1093: Collective on PC
1095: Input Parameters:
1096: + pc - the preconditioner context
1097: . nsub - the number of local submatrices
1098: . row - an array of index sets that contain the global row numbers
1099: that comprise each local submatrix
1100: . col - an array of index sets that contain the global column numbers
1101: that comprise each local submatrix
1102: . submat - array of local submatrices
1103: - ctx - optional user-defined context for private data for the
1104: user-defined routine (may be null)
1106: Output Parameter:
1107: . submat - array of local submatrices (the entries of which may
1108: have been modified)
1110: Notes:
1111: The user should NOT generally call this routine, as it will
1112: automatically be called within certain preconditioners (currently
1113: block Jacobi, additive Schwarz) if set.
1115: The basic submatrices are extracted from the preconditioner matrix
1116: as usual; the user can then alter these (for example, to set different
1117: boundary conditions for each submatrix) before they are used for the
1118: local solves.
1120: Level: developer
1122: .seealso: PCSetModifySubMatrices()
1123: @*/
1124: PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1125: {
1130: if (!pc->modifysubmatrices) return(0);
1131: PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
1132: (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
1133: PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
1134: return(0);
1135: }
1137: /*@
1138: PCSetOperators - Sets the matrix associated with the linear system and
1139: a (possibly) different one associated with the preconditioner.
1141: Logically Collective on PC
1143: Input Parameters:
1144: + pc - the preconditioner context
1145: . Amat - the matrix that defines the linear system
1146: - Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1148: Notes:
1149: Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1151: If you wish to replace either Amat or Pmat but leave the other one untouched then
1152: first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1153: on it and then pass it back in your call to KSPSetOperators().
1155: More Notes about Repeated Solution of Linear Systems:
1156: PETSc does NOT reset the matrix entries of either Amat or Pmat
1157: to zero after a linear solve; the user is completely responsible for
1158: matrix assembly. See the routine MatZeroEntries() if desiring to
1159: zero all elements of a matrix.
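   A hedged sketch of replacing only Pmat, following the Notes above (Pnew is a hypothetical
   replacement matrix; the final MatDestroy() drops the extra reference taken here):
.vb
      KSPGetOperators(ksp,&Amat,NULL);
      PetscObjectReference((PetscObject)Amat);
      KSPSetOperators(ksp,Amat,Pnew);
      MatDestroy(&Amat);
.ve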
1161: Level: intermediate
1163: .seealso: PCGetOperators(), MatZeroEntries()
1164: @*/
1165: PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat)
1166: {
1167: PetscErrorCode ierr;
1168: PetscInt m1,n1,m2,n2;
1176: if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
1177: MatGetLocalSize(Amat,&m1,&n1);
1178: MatGetLocalSize(pc->mat,&m2,&n2);
1179: if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1180: MatGetLocalSize(Pmat,&m1,&n1);
1181: MatGetLocalSize(pc->pmat,&m2,&n2);
1182: if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1183: }
1185: if (Pmat != pc->pmat) {
1186: /* changing the operator that defines the preconditioner, so we need to clear the current states so that a new preconditioner is built */
1187: pc->matnonzerostate = -1;
1188: pc->matstate = -1;
1189: }
1191: /* reference first in case the matrices are the same */
1192: if (Amat) {PetscObjectReference((PetscObject)Amat);}
1193: MatDestroy(&pc->mat);
1194: if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
1195: MatDestroy(&pc->pmat);
1196: pc->mat = Amat;
1197: pc->pmat = Pmat;
1198: return(0);
1199: }
1201: /*@
1202: PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1204: Logically Collective on PC
1206: Input Parameters:
1207: + pc - the preconditioner context
1208: - flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1210: Level: intermediate
1212: .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
1213: @*/
1214: PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag)
1215: {
1219: pc->reusepreconditioner = flag;
1220: return(0);
1221: }
1223: /*@
1224: PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.
1226: Not Collective
1228: Input Parameter:
1229: . pc - the preconditioner context
1231: Output Parameter:
1232: . flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner
1234: Level: intermediate
1236: .seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
1237: @*/
1238: PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
1239: {
1243: *flag = pc->reusepreconditioner;
1244: return(0);
1245: }
1247: /*@
1248: PCGetOperators - Gets the matrix associated with the linear system and
1249: possibly a different one associated with the preconditioner.
1251: Not collective, though parallel Mats are returned if the PC is parallel
1253: Input Parameter:
1254: . pc - the preconditioner context
1256: Output Parameters:
1257: + Amat - the matrix defining the linear system
1258: - Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1260: Level: intermediate
1262: Notes:
1263: Does not increase the reference count of the matrices, so you should not destroy them
1265: Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1266: are created in PC and returned to the user. In this case, if both operators
1267: mat and pmat are requested, two DIFFERENT operators will be returned. If
1268: only one is requested both operators in the PC will be the same (i.e. as
1269: if one had called KSP/PCSetOperators() with the same argument for both Mats).
1270: The user must set the sizes of the returned matrices and their type etc just
1271: as if the user created them with MatCreate(). For example,
1273: $ KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1274: $ set size, type, etc of Amat
1276: $ MatCreate(comm,&Amat);
1277: $ KSP/PCSetOperators(ksp/pc,Amat,Amat);
1278: $ PetscObjectDereference((PetscObject)Amat);
1279: $ set size, type, etc of Amat
1281: and
1283: $ KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1284: $ set size, type, etc of Amat and Pmat
1286: $ MatCreate(comm,&Amat);
1287: $ MatCreate(comm,&Pmat);
1288: $ KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1289: $ PetscObjectDereference((PetscObject)Amat);
1290: $ PetscObjectDereference((PetscObject)Pmat);
1291: $ set size, type, etc of Amat and Pmat
1293: The rationale for this support is that when creating a TS, SNES, or KSP the hierarchy
1294: of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1295: managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1296: at this is when you create a SNES you do not NEED to create a KSP and attach it to
1297: the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1298: you do not need to attach a PC to it (the KSP object manages the PC object for you).
1299: Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1300: it can be created for you?
1303: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1304: @*/
1305: PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
1306: {
1311: if (Amat) {
1312: if (!pc->mat) {
1313: if (pc->pmat && !Pmat) { /* Pmat has been set, but the user did not request it, so use it for Amat */
1314: pc->mat = pc->pmat;
1315: PetscObjectReference((PetscObject)pc->mat);
1316: } else { /* both Amat and Pmat are empty */
1317: MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);
1318: if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
1319: pc->pmat = pc->mat;
1320: PetscObjectReference((PetscObject)pc->pmat);
1321: }
1322: }
1323: }
1324: *Amat = pc->mat;
1325: }
1326: if (Pmat) {
1327: if (!pc->pmat) {
1328: if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
1329: pc->pmat = pc->mat;
1330: PetscObjectReference((PetscObject)pc->pmat);
1331: } else {
1332: MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);
1333: if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
1334: pc->mat = pc->pmat;
1335: PetscObjectReference((PetscObject)pc->mat);
1336: }
1337: }
1338: }
1339: *Pmat = pc->pmat;
1340: }
1341: return(0);
1342: }
1344: /*@C
1345: PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1346: possibly a different one associated with the preconditioner have been set in the PC.
1348: Not collective, though the results on all processes should be the same
1350: Input Parameter:
1351: . pc - the preconditioner context
1353: Output Parameters:
1354: + mat - PETSC_TRUE if the matrix associated with the linear system was set
1355: - pmat - PETSC_TRUE if the matrix associated with the preconditioner was set (usually the same)
1357: Level: intermediate
1359: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1360: @*/
1361: PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
1362: {
1365: if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
1366: if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1367: return(0);
1368: }
1370: /*@
1371: PCFactorGetMatrix - Gets the factored matrix from the
1372: preconditioner context. This routine is valid only for the LU,
1373: incomplete LU, Cholesky, and incomplete Cholesky methods.
1375: Not Collective on PC though Mat is parallel if PC is parallel
1377: Input Parameter:
1378: . pc - the preconditioner context
1380: Output Parameter:
1381: . mat - the factored matrix
1383: Level: advanced
1385: Notes:
1386: Does not increase the reference count for the matrix so DO NOT destroy it
1388: @*/
1389: PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
1390: {
1396: if (pc->ops->getfactoredmatrix) {
1397: (*pc->ops->getfactoredmatrix)(pc,mat);
1398: } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1399: return(0);
1400: }
1402: /*@C
1403: PCSetOptionsPrefix - Sets the prefix used for searching for all
1404: PC options in the database.
1406: Logically Collective on PC
1408: Input Parameters:
1409: + pc - the preconditioner context
1410: - prefix - the prefix string to prepend to all PC option requests
1412: Notes:
1413: A hyphen (-) must NOT be given at the beginning of the prefix name.
1414: The first character of all runtime options is AUTOMATICALLY the
1415: hyphen.
1417: Level: advanced
1419: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1420: @*/
1421: PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
1422: {
1427: PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);
1428: return(0);
1429: }
1431: /*@C
1432: PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1433: PC options in the database.
1435: Logically Collective on PC
1437: Input Parameters:
1438: + pc - the preconditioner context
1439: - prefix - the prefix string to prepend to all PC option requests
1441: Notes:
1442: A hyphen (-) must NOT be given at the beginning of the prefix name.
1443: The first character of all runtime options is AUTOMATICALLY the
1444: hyphen.
1446: Level: advanced
1448: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1449: @*/
1450: PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
1451: {
1456: PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);
1457: return(0);
1458: }
1460: /*@C
1461: PCGetOptionsPrefix - Gets the prefix used for searching for all
1462: PC options in the database.
1464: Not Collective
1466: Input Parameters:
1467: . pc - the preconditioner context
1469: Output Parameters:
1470: . prefix - pointer to the prefix string used, is returned
1472: Notes:
1473: On the Fortran side, the user should pass in a string 'prefix' of
1474: sufficient length to hold the prefix.
1476: Level: advanced
1478: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1479: @*/
1480: PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
1481: {
1487: PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);
1488: return(0);
1489: }
1491: /*
1492: Indicates that the right-hand side will be changed by KSPSolve(); this occurs for a few
1493: preconditioners, including BDDC and Eisenstat, that transform the equations before applying
1494: the Krylov method
1495: */
1496: PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change)
1497: {
1503: *change = PETSC_FALSE;
1504: PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
1505: return(0);
1506: }
1508: /*@
1509: PCPreSolve - Optional pre-solve phase, intended for any
1510: preconditioner-specific actions that must be performed before
1511: the iterative solve itself.
1513: Collective on PC
1515: Input Parameters:
1516: + pc - the preconditioner context
1517: - ksp - the Krylov subspace context
1519: Level: developer
1521: Sample of Usage:
1522: .vb
1523: PCPreSolve(pc,ksp);
1524: KSPSolve(ksp,b,x);
1525: PCPostSolve(pc,ksp);
1526: .ve
1528: Notes:
1529: The pre-solve phase is distinct from the PCSetUp() phase.
1531: KSPSolve() calls this directly, so it is rarely called by the user.
1533: .seealso: PCPostSolve()
1534: @*/
1535: PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1536: {
1538: Vec x,rhs;
1543: pc->presolvedone++;
1544: if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1545: KSPGetSolution(ksp,&x);
1546: KSPGetRhs(ksp,&rhs);
1548: if (pc->ops->presolve) {
1549: (*pc->ops->presolve)(pc,ksp,rhs,x);
1550: }
1551: return(0);
1552: }
1554: /*@
1555: PCPostSolve - Optional post-solve phase, intended for any
1556: preconditioner-specific actions that must be performed after
1557: the iterative solve itself.
1559: Collective on PC
1561: Input Parameters:
1562: + pc - the preconditioner context
1563: - ksp - the Krylov subspace context
1565: Sample of Usage:
1566: .vb
1567: PCPreSolve(pc,ksp);
1568: KSPSolve(ksp,b,x);
1569: PCPostSolve(pc,ksp);
1570: .ve
1572: Note:
1573: KSPSolve() calls this routine directly, so it is rarely called by the user.
1575: Level: developer
1577: .seealso: PCPreSolve(), KSPSolve()
1578: @*/
1579: PetscErrorCode PCPostSolve(PC pc,KSP ksp)
1580: {
1582: Vec x,rhs;
1587: pc->presolvedone--;
1588: KSPGetSolution(ksp,&x);
1589: KSPGetRhs(ksp,&rhs);
1590: if (pc->ops->postsolve) {
1591: (*pc->ops->postsolve)(pc,ksp,rhs,x);
1592: }
1593: return(0);
1594: }
1596: /*@C
1597: PCLoad - Loads a PC that has been stored in binary with PCView().
1599: Collective on PetscViewer
1601: Input Parameters:
1602: + newdm - the newly loaded PC; this needs to have been created with PCCreate() or
1603: some related function before a call to PCLoad().
1604: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1606: Level: intermediate
1608: Notes:
1609: The type is determined by the data in the file, any type set into the PC before this call is ignored.
1611: Notes for advanced users:
1612: Most users should not need to know the details of the binary storage
1613: format, since PCLoad() and PCView() completely hide these details.
1614: But for anyone who's interested, the standard binary matrix storage
1615: format is
1616: .vb
1617: has not yet been determined
1618: .ve
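   A hedged usage sketch (assumes pc.bin was written earlier by PCView() on a binary viewer):
.vb
      PetscViewerBinaryOpen(PETSC_COMM_WORLD,"pc.bin",FILE_MODE_READ,&viewer);
      PCCreate(PETSC_COMM_WORLD,&pc);
      PCLoad(pc,viewer);
      PetscViewerDestroy(&viewer);
.ve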
1620: .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1621: @*/
1622: PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
1623: {
1625: PetscBool isbinary;
1626: PetscInt classid;
1627: char type[256];
1632: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1633: if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1635: PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);
1636: if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1637: PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);
1638: PCSetType(newdm, type);
1639: if (newdm->ops->load) {
1640: (*newdm->ops->load)(newdm,viewer);
1641: }
1642: return(0);
1643: }
1645: #include <petscdraw.h>
1646: #if defined(PETSC_HAVE_SAWS)
1647: #include <petscviewersaws.h>
1648: #endif
1650: /*@C
1651: PCViewFromOptions - Views a PC object based on options set in the options database
1653: Collective on PC
1655: Input Parameters:
1656: + A - the PC context
1657: . obj - Optional object
1658: - name - command line option
1660: Level: intermediate
1661: .seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate()
1662: @*/
1663: PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
1664: {
1669: PetscObjectViewFromOptions((PetscObject)A,obj,name);
1670: return(0);
1671: }
1673: /*@C
1674: PCView - Prints the PC data structure.
1676: Collective on PC
1678: Input Parameters:
1679: + pc - the PC context
1680: - viewer - optional visualization context
1682: Note:
1683: The available visualization contexts include
1684: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
1685: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1686: output where only the first processor opens
1687: the file. All other processors send their
1688: data to the first processor to print.
1690: The user can open an alternative visualization context with
1691: PetscViewerASCIIOpen() (output to a specified file).
1693: Level: developer
1695: .seealso: KSPView(), PetscViewerASCIIOpen()
1696: @*/
1697: PetscErrorCode PCView(PC pc,PetscViewer viewer)
1698: {
1699: PCType cstr;
1701: PetscBool iascii,isstring,isbinary,isdraw;
1702: #if defined(PETSC_HAVE_SAWS)
1703: PetscBool issaws;
1704: #endif
1708: if (!viewer) {
1709: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);
1710: }
1714: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1715: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
1716: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1717: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
1718: #if defined(PETSC_HAVE_SAWS)
1719: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);
1720: #endif
1722: if (iascii) {
1723: PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);
1724: if (!pc->setupcalled) {
1725: PetscViewerASCIIPrintf(viewer," PC has not been set up so information may be incomplete\n");
1726: }
1727: if (pc->ops->view) {
1728: PetscViewerASCIIPushTab(viewer);
1729: (*pc->ops->view)(pc,viewer);
1730: PetscViewerASCIIPopTab(viewer);
1731: }
1732: if (pc->mat) {
1733: PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1734: if (pc->pmat == pc->mat) {
1735: PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");
1736: PetscViewerASCIIPushTab(viewer);
1737: MatView(pc->mat,viewer);
1738: PetscViewerASCIIPopTab(viewer);
1739: } else {
1740: if (pc->pmat) {
1741: PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");
1742: } else {
1743: PetscViewerASCIIPrintf(viewer," linear system matrix:\n");
1744: }
1745: PetscViewerASCIIPushTab(viewer);
1746: MatView(pc->mat,viewer);
1747: if (pc->pmat) {MatView(pc->pmat,viewer);}
1748: PetscViewerASCIIPopTab(viewer);
1749: }
1750: PetscViewerPopFormat(viewer);
1751: }
1752: } else if (isstring) {
1753: PCGetType(pc,&cstr);
1754: PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);
1755: if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1756: if (pc->mat) {MatView(pc->mat,viewer);}
1757: if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1758: } else if (isbinary) {
1759: PetscInt classid = PC_FILE_CLASSID;
1760: MPI_Comm comm;
1761: PetscMPIInt rank;
1762: char type[256];
1764: PetscObjectGetComm((PetscObject)pc,&comm);
1765: MPI_Comm_rank(comm,&rank);
1766: if (!rank) {
1767: PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);
1768: PetscStrncpy(type,((PetscObject)pc)->type_name,256);
1769: PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);
1770: }
1771: if (pc->ops->view) {
1772: (*pc->ops->view)(pc,viewer);
1773: }
1774: } else if (isdraw) {
1775: PetscDraw draw;
1776: char str[25];
1777: PetscReal x,y,bottom,h;
1778: PetscInt n;
1780: PetscViewerDrawGetDraw(viewer,0,&draw);
1781: PetscDrawGetCurrentPoint(draw,&x,&y);
1782: if (pc->mat) {
1783: MatGetSize(pc->mat,&n,NULL);
1784: PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);
1785: } else {
1786: PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);
1787: }
1788: PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
1789: bottom = y - h;
1790: PetscDrawPushCurrentPoint(draw,x,bottom);
1791: if (pc->ops->view) {
1792: (*pc->ops->view)(pc,viewer);
1793: }
1794: PetscDrawPopCurrentPoint(draw);
1795: #if defined(PETSC_HAVE_SAWS)
1796: } else if (issaws) {
1797: PetscMPIInt rank;
1799: PetscObjectName((PetscObject)pc);
1800: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1801: if (!((PetscObject)pc)->amsmem && !rank) {
1802: PetscObjectViewSAWs((PetscObject)pc,viewer);
1803: }
1804: if (pc->mat) {MatView(pc->mat,viewer);}
1805: if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1806: #endif
1807: }
1808: return(0);
1809: }
1811: /*@C
1812: PCRegister - Adds a method to the preconditioner package.
1814: Not collective
1816: Input Parameters:
1817: + name_solver - name of a new user-defined solver
1818: - routine_create - routine to create method context
1820: Notes:
1821: PCRegister() may be called multiple times to add several user-defined preconditioners.
1823: Sample usage:
1824: .vb
1825: PCRegister("my_solver", MySolverCreate);
1826: .ve
1828: Then, your solver can be chosen with the procedural interface via
1829: $ PCSetType(pc,"my_solver")
1830: or at runtime via the option
1831: $ -pc_type my_solver
1833: Level: advanced
1835: .seealso: PCRegisterAll()
1836: @*/
1837: PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
1838: {
1842: PCInitializePackage();
1843: PetscFunctionListAdd(&PCList,sname,function);
1844: return(0);
1845: }
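/* MatShell MATOP_MULT callback: applies the PC as the action of a matrix, so that
   PCComputeOperator() below can probe it with MatComputeOperator() */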
1847: static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1848: {
1849: PC pc;
1853: MatShellGetContext(A,&pc);
1854: PCApply(pc,X,Y);
1855: return(0);
1856: }
1858: /*@
1859: PCComputeOperator - Computes the explicit preconditioned operator.
1861: Collective on PC
1863: Input Parameters:
1864: + pc - the preconditioner object
1865: - mattype - the matrix type to be used for the operator
1867: Output Parameter:
1868: . mat - the explicit preconditioned operator
1870: Notes:
1871: This computation is done by applying the operators to columns of the identity matrix.
1872: This routine is costly in general, and is recommended for use only with relatively small systems.
1873: Currently, this routine uses a dense matrix format when mattype == NULL
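   A hedged sketch (small problems only; NULL mattype yields a dense matrix):
.vb
      PCComputeOperator(pc,NULL,&B);
      MatView(B,PETSC_VIEWER_STDOUT_WORLD);
      MatDestroy(&B);
.ve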
1875: Level: advanced
1877: .seealso: KSPComputeOperator(), MatType
1879: @*/
1880: PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1881: {
1883: PetscInt N,M,m,n;
1884: Mat A,Apc;
1889: PCGetOperators(pc,&A,NULL);
1890: MatGetLocalSize(A,&m,&n);
1891: MatGetSize(A,&M,&N);
1892: MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);
1893: MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);
1894: MatComputeOperator(Apc,mattype,mat);
1895: MatDestroy(&Apc);
1896: return(0);
1897: }
1899: /*@
1900: PCSetCoordinates - sets the coordinates of all the nodes on the local process
1902: Collective on PC
1904: Input Parameters:
1905: + pc - the solver context
1906: . dim - the dimension of the coordinates 1, 2, or 3
1907: . nloc - the blocked size of the coordinates array
1908: - coords - the coordinates array
1910: Level: intermediate
1912: Notes:
1913: coords is an array of the dim coordinates for the nodes on
1914: the local processor, of size dim*nloc.
1915: If there are 108 equations on a processor
1916: for a displacement finite element discretization of elasticity (so
1917: that there are nloc = 36 = 108/3 nodes) then the array must have 108
1918: double precision values (i.e., 3 * 36). These x y z coordinates
1919: should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1920: ... , N-1.z ].
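   A hedged sketch for the elasticity example above (assumes the implementation,
   e.g. PCGAMG, copies the array, so that it may be freed afterwards):
.vb
      PetscReal *coords;
      PetscMalloc1(3*nloc,&coords);
      /* fill coords[3*i], coords[3*i+1], coords[3*i+2] with the x,y,z position of node i */
      PCSetCoordinates(pc,3,nloc,coords);
      PetscFree(coords);
.ve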
1922: .seealso: MatSetNearNullSpace()
1923: @*/
1924: PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
1925: {
1931: PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1932: return(0);
1933: }
1935: /*@
1936: PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)
1938: Logically Collective on PC
1940: Input Parameter:
1941: . pc - the preconditioner context
1943: Output Parameters:
1944: + num_levels - the number of levels
1945: - interpolations - the interpolation matrices (an array of size num_levels-1)
1947: Level: advanced
1949: .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level
1951: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
1952: @*/
1953: PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
1954: {
1961: PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
1962: return(0);
1963: }
1965: /*@
1966: PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)
1968: Logically Collective on PC
1970: Input Parameter:
1971: . pc - the preconditioner context
1973: Output Parameters:
1974: + num_levels - the number of levels
1975: - coarseOperators - the coarse operator matrices (an array of size num_levels-1)
1977: Level: advanced
1979: .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level
1981: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
1982: @*/
1983: PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
1984: {
1991: PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
1992: return(0);
1993: }