Actual source code: precon.c
petsc-3.4.5 2014-06-29
2: /*
3: The PC (preconditioner) interface routines, callable by users.
4: */
5: #include <petsc-private/pcimpl.h> /*I "petscksp.h" I*/
6: #include <petscdm.h>
8: /* Logging support */
9: PetscClassId PC_CLASSID;
10: PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11: PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks, PC_ApplyOnMproc;
15: PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
16: {
18: PetscMPIInt size;
19: PetscBool flg1,flg2,set,flg3;
22: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
23: if (pc->pmat) {
24: PetscErrorCode (*f)(Mat,MatReuse,Mat*);
25: PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",&f);
26: if (size == 1) {
27: MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
28: MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
29: MatIsSymmetricKnown(pc->pmat,&set,&flg3);
30: if (flg1 && (!flg2 || (set && flg3))) {
31: *type = PCICC;
32: } else if (flg2) {
33: *type = PCILU;
34: } else if (f) { /* likely is a parallel matrix run on one processor */
35: *type = PCBJACOBI;
36: } else {
37: *type = PCNONE;
38: }
39: } else {
40: if (f) {
41: *type = PCBJACOBI;
42: } else {
43: *type = PCNONE;
44: }
45: }
46: } else {
47: if (size == 1) {
48: *type = PCILU;
49: } else {
50: *type = PCBJACOBI;
51: }
52: }
53: return(0);
54: }
58: /*@
59: PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
61: Collective on PC
63: Input Parameter:
64: . pc - the preconditioner context
66: Level: developer
68: Notes: This allows a PC to be reused for a linear system of a different size while keeping the options that were previously set in the PC
70: .keywords: PC, destroy
72: .seealso: PCCreate(), PCSetUp()
73: @*/
74: PetscErrorCode PCReset(PC pc)
75: {
80: if (pc->ops->reset) {
81: (*pc->ops->reset)(pc);
82: }
83: VecDestroy(&pc->diagonalscaleright);
84: VecDestroy(&pc->diagonalscaleleft);
85: MatDestroy(&pc->pmat);
86: MatDestroy(&pc->mat);
88: pc->setupcalled = 0;
89: return(0);
90: }
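
/*
   Example (a usage sketch, not part of the library): reusing one PC for a system of a
   different size with PCReset(). The matrices Asmall/Alarge and the correspondingly sized
   vectors are assumed to have been assembled by the caller.
*/
static PetscErrorCode ExamplePCReuse(PC pc,Mat Asmall,Vec bs,Vec xs,Mat Alarge,Vec bl,Vec xl)
{
  PCSetOperators(pc,Asmall,Asmall,DIFFERENT_NONZERO_PATTERN);
  PCSetUp(pc);
  PCApply(pc,bs,xs);                                          /* apply B for the small system */
  PCReset(pc);                                                /* discard Vecs/Mats sized for the old system */
  PCSetOperators(pc,Alarge,Alarge,DIFFERENT_NONZERO_PATTERN); /* same type and options, new size */
  PCSetUp(pc);
  PCApply(pc,bl,xl);
  return(0);
}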
94: /*@
95: PCDestroy - Destroys PC context that was created with PCCreate().
97: Collective on PC
99: Input Parameter:
100: . pc - the preconditioner context
102: Level: developer
104: .keywords: PC, destroy
106: .seealso: PCCreate(), PCSetUp()
107: @*/
108: PetscErrorCode PCDestroy(PC *pc)
109: {
113: if (!*pc) return(0);
115: if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; return(0);}
117: PCReset(*pc);
119: /* if memory was published with AMS then destroy it */
120: PetscObjectAMSViewOff((PetscObject)*pc);
121: if ((*pc)->ops->destroy) {(*(*pc)->ops->destroy)((*pc));}
122: DMDestroy(&(*pc)->dm);
123: PetscHeaderDestroy(pc);
124: return(0);
125: }
129: /*@C
130: PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
131: scaling as needed by certain time-stepping codes.
133: Logically Collective on PC
135: Input Parameter:
136: . pc - the preconditioner context
138: Output Parameter:
139: . flag - PETSC_TRUE if it applies the scaling
141: Level: developer
143: Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
144: $ D M A D^{-1} y = D M b for left preconditioning or
145: $ D A M D^{-1} z = D b for right preconditioning
147: .keywords: PC
149: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
150: @*/
151: PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
152: {
156: *flag = pc->diagonalscale;
157: return(0);
158: }
162: /*@
163: PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
164: scaling as needed by certain time-stepping codes.
166: Logically Collective on PC
168: Input Parameters:
169: + pc - the preconditioner context
170: - s - scaling vector
172: Level: intermediate
174: Notes: The system solved via the Krylov method is
175: $ D M A D^{-1} y = D M b for left preconditioning or
176: $ D A M D^{-1} z = D b for right preconditioning
178: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
180: .keywords: PC
182: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
183: @*/
184: PetscErrorCode PCSetDiagonalScale(PC pc,Vec s)
185: {
191: pc->diagonalscale = PETSC_TRUE;
193: PetscObjectReference((PetscObject)s);
194: VecDestroy(&pc->diagonalscaleleft);
196: pc->diagonalscaleleft = s;
198: VecDuplicate(s,&pc->diagonalscaleright);
199: VecCopy(s,pc->diagonalscaleright);
200: VecReciprocal(pc->diagonalscaleright);
201: return(0);
202: }
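
/*
   Example (a usage sketch): providing the scaling vector D. The vector d is assumed to be
   laid out compatibly with the operators already set in the PC.
*/
static PetscErrorCode ExampleSetDiagonalScale(PC pc,Vec d)
{
  PetscBool flg;

  PCSetDiagonalScale(pc,d);    /* the PC keeps D and builds D^{-1} internally */
  PCGetDiagonalScale(pc,&flg); /* flg is now PETSC_TRUE */
  return(0);
}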
206: /*@
207: PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
209: Logically Collective on PC
211: Input Parameters:
212: + pc - the preconditioner context
213: . in - input vector
214: - out - scaled vector (may be the same as in)
216: Level: intermediate
218: Notes: The system solved via the Krylov method is
219: $ D M A D^{-1} y = D M b for left preconditioning or
220: $ D A M D^{-1} z = D b for right preconditioning
222: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
224: If diagonal scaling is turned off and in is not out then in is copied to out
226: .keywords: PC
228: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
229: @*/
230: PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
231: {
238: if (pc->diagonalscale) {
239: VecPointwiseMult(out,pc->diagonalscaleleft,in);
240: } else if (in != out) {
241: VecCopy(in,out);
242: }
243: return(0);
244: }
248: /*@
249: PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
251: Logically Collective on PC
253: Input Parameters:
254: + pc - the preconditioner context
255: . in - input vector
256: - out - scaled vector (may be the same as in)
258: Level: intermediate
260: Notes: The system solved via the Krylov method is
261: $ D M A D^{-1} y = D M b for left preconditioning or
262: $ D A M D^{-1} z = D b for right preconditioning
264: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
266: If diagonal scaling is turned off and in is not out then in is copied to out
268: .keywords: PC
270: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
271: @*/
272: PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
273: {
280: if (pc->diagonalscale) {
281: VecPointwiseMult(out,pc->diagonalscaleright,in);
282: } else if (in != out) {
283: VecCopy(in,out);
284: }
285: return(0);
286: }
290: /*@
291: PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
292: operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
293: TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.
295: Logically Collective on PC
297: Input Parameters:
298: + pc - the preconditioner context
299: - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
301: Options Database Key:
302: . -pc_use_amat <true,false>
304: Notes:
305: For the common case in which the linear system matrix and the matrix used to construct the
306: preconditioner are identical, this routine does nothing.
308: Level: intermediate
310: .seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
311: @*/
312: PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg)
313: {
316: pc->useAmat = flg;
317: return(0);
318: }
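
/*
   Example (a usage sketch): supplying different Amat and Pmat and asking the PC to apply
   Amat whenever it needs the operator, equivalent to the -pc_use_amat option.
*/
static PetscErrorCode ExampleUseAmat(PC pc,Mat Amat,Mat Pmat)
{
  PCSetOperators(pc,Amat,Pmat,DIFFERENT_NONZERO_PATTERN);
  PCSetUseAmat(pc,PETSC_TRUE);
  return(0);
}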
322: /*@
323: PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
324: operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
325: TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.
327: Logically Collective on PC
329: Input Parameter:
330: . pc - the preconditioner context
332: Output Parameter:
333: . flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
335: Notes:
336: For the common case in which the linear system matrix and the matrix used to construct the
337: preconditioner are identical, this routine does nothing.
339: Level: intermediate
341: .seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
342: @*/
343: PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg)
344: {
347: *flg = pc->useAmat;
348: return(0);
349: }
353: /*@
354: PCCreate - Creates a preconditioner context.
356: Collective on MPI_Comm
358: Input Parameter:
359: . comm - MPI communicator
361: Output Parameter:
362: . pc - location to put the preconditioner context
364: Notes:
365: The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi (PCBJACOBI) with PCILU or PCICC
366: on each block in parallel. For dense matrices it is always PCNONE.
368: Level: developer
370: .keywords: PC, create, context
372: .seealso: PCSetUp(), PCApply(), PCDestroy()
373: @*/
374: PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc)
375: {
376: PC pc;
381: *newpc = 0;
382: #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
383: PCInitializePackage();
384: #endif
386: PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);
388: pc->mat = 0;
389: pc->pmat = 0;
390: pc->setupcalled = 0;
391: pc->setfromoptionscalled = 0;
392: pc->data = 0;
393: pc->diagonalscale = PETSC_FALSE;
394: pc->diagonalscaleleft = 0;
395: pc->diagonalscaleright = 0;
397: pc->modifysubmatrices = 0;
398: pc->modifysubmatricesP = 0;
400: *newpc = pc;
401: return(0);
403: }
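
/*
   Example (a usage sketch): the basic life cycle of a PC used directly, outside a KSP.
   The matrix A and vectors b,x are assumed to have been created and assembled by the caller.
*/
static PetscErrorCode ExamplePCLifeCycle(MPI_Comm comm,Mat A,Vec b,Vec x)
{
  PC pc;

  PCCreate(comm,&pc);
  PCSetOperators(pc,A,A,DIFFERENT_NONZERO_PATTERN);
  PCSetType(pc,PCJACOBI);   /* or leave unset to get the default chosen in PCSetUp() */
  PCSetFromOptions(pc);     /* allow -pc_type etc. to override */
  PCSetUp(pc);
  PCApply(pc,b,x);          /* x = B b */
  PCDestroy(&pc);
  return(0);
}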
405: /* -------------------------------------------------------------------------------*/
409: /*@
410: PCApply - Applies the preconditioner to a vector.
412: Collective on PC and Vec
414: Input Parameters:
415: + pc - the preconditioner context
416: - x - input vector
418: Output Parameter:
419: . y - output vector
421: Level: developer
423: .keywords: PC, apply
425: .seealso: PCApplyTranspose(), PCApplyBAorAB()
426: @*/
427: PetscErrorCode PCApply(PC pc,Vec x,Vec y)
428: {
435: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
436: VecValidValues(x,2,PETSC_TRUE);
437: if (pc->setupcalled < 2) {
438: PCSetUp(pc);
439: }
440: if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
441: PetscLogEventBegin(PC_Apply,pc,x,y,0);
442: (*pc->ops->apply)(pc,x,y);
443: PetscLogEventEnd(PC_Apply,pc,x,y,0);
444: VecValidValues(y,3,PETSC_FALSE);
445: return(0);
446: }
450: /*@
451: PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
453: Collective on PC and Vec
455: Input Parameters:
456: + pc - the preconditioner context
457: - x - input vector
459: Output Parameter:
460: . y - output vector
462: Notes:
463: Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
465: Level: developer
467: .keywords: PC, apply, symmetric, left
469: .seealso: PCApply(), PCApplySymmetricRight()
470: @*/
471: PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
472: {
479: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
480: VecValidValues(x,2,PETSC_TRUE);
481: if (pc->setupcalled < 2) {
482: PCSetUp(pc);
483: }
484: if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
485: PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
486: (*pc->ops->applysymmetricleft)(pc,x,y);
487: PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
488: VecValidValues(y,3,PETSC_FALSE);
489: return(0);
490: }
494: /*@
495: PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
497: Collective on PC and Vec
499: Input Parameters:
500: + pc - the preconditioner context
501: - x - input vector
503: Output Parameter:
504: . y - output vector
506: Level: developer
508: Notes:
509: Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
511: .keywords: PC, apply, symmetric, right
513: .seealso: PCApply(), PCApplySymmetricLeft()
514: @*/
515: PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y)
516: {
523: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
524: VecValidValues(x,2,PETSC_TRUE);
525: if (pc->setupcalled < 2) {
526: PCSetUp(pc);
527: }
528: if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have right symmetric apply");
529: PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
530: (*pc->ops->applysymmetricright)(pc,x,y);
531: PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
532: VecValidValues(y,3,PETSC_FALSE);
533: return(0);
534: }
538: /*@
539: PCApplyTranspose - Applies the transpose of preconditioner to a vector.
541: Collective on PC and Vec
543: Input Parameters:
544: + pc - the preconditioner context
545: - x - input vector
547: Output Parameter:
548: . y - output vector
550: Notes: For complex numbers this applies the non-Hermitian transpose.
552: Developer Notes: We need to implement a PCApplyHermitianTranspose()
554: Level: developer
556: .keywords: PC, apply, transpose
558: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
559: @*/
560: PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
561: {
568: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
569: VecValidValues(x,2,PETSC_TRUE);
570: if (pc->setupcalled < 2) {
571: PCSetUp(pc);
572: }
573: if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
574: PetscLogEventBegin(PC_Apply,pc,x,y,0);
575: (*pc->ops->applytranspose)(pc,x,y);
576: PetscLogEventEnd(PC_Apply,pc,x,y,0);
577: VecValidValues(y,3,PETSC_FALSE);
578: return(0);
579: }
583: /*@
584: PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
586: Collective on PC and Vec
588: Input Parameters:
589: . pc - the preconditioner context
591: Output Parameter:
592: . flg - PETSC_TRUE if a transpose operation is defined
594: Level: developer
596: .keywords: PC, apply, transpose
598: .seealso: PCApplyTranspose()
599: @*/
600: PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
601: {
605: if (pc->ops->applytranspose) *flg = PETSC_TRUE;
606: else *flg = PETSC_FALSE;
607: return(0);
608: }
612: /*@
613: PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
615: Collective on PC and Vec
617: Input Parameters:
618: + pc - the preconditioner context
619: . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
620: . x - input vector
621: - work - work vector
623: Output Parameter:
624: . y - output vector
626: Level: developer
628: Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} is actually applied for left preconditioning or D A M D^{-1} for right preconditioning. Note that the
629: specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
631: .keywords: PC, apply, operator
633: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
634: @*/
635: PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
636: {
644: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
645: VecValidValues(x,3,PETSC_TRUE);
646: if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
647: if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
649: if (pc->setupcalled < 2) {
650: PCSetUp(pc);
651: }
653: if (pc->diagonalscale) {
654: if (pc->ops->applyBA) {
655: Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
656: VecDuplicate(x,&work2);
657: PCDiagonalScaleRight(pc,x,work2);
658: (*pc->ops->applyBA)(pc,side,work2,y,work);
659: PCDiagonalScaleLeft(pc,y,y);
660: VecDestroy(&work2);
661: } else if (side == PC_RIGHT) {
662: PCDiagonalScaleRight(pc,x,y);
663: PCApply(pc,y,work);
664: MatMult(pc->mat,work,y);
665: PCDiagonalScaleLeft(pc,y,y);
666: } else if (side == PC_LEFT) {
667: PCDiagonalScaleRight(pc,x,y);
668: MatMult(pc->mat,y,work);
669: PCApply(pc,work,y);
670: PCDiagonalScaleLeft(pc,y,y);
671: } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
672: } else {
673: if (pc->ops->applyBA) {
674: (*pc->ops->applyBA)(pc,side,x,y,work);
675: } else if (side == PC_RIGHT) {
676: PCApply(pc,x,work);
677: MatMult(pc->mat,work,y);
678: } else if (side == PC_LEFT) {
679: MatMult(pc->mat,x,work);
680: PCApply(pc,work,y);
681: } else if (side == PC_SYMMETRIC) {
682: /* There's an extra copy here; maybe should provide 2 work vectors instead? */
683: PCApplySymmetricRight(pc,x,work);
684: MatMult(pc->mat,work,y);
685: VecCopy(y,work);
686: PCApplySymmetricLeft(pc,work,y);
687: }
688: }
689: VecValidValues(y,4,PETSC_FALSE);
690: return(0);
691: }
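
/*
   Example (a usage sketch): applying the right-preconditioned operator y = A B x, as a
   Krylov method would. The PC is assumed to already have its operators set and set up.
*/
static PetscErrorCode ExampleApplyAB(PC pc,Vec x,Vec y)
{
  Vec work;

  VecDuplicate(x,&work);
  PCApplyBAorAB(pc,PC_RIGHT,x,y,work); /* work = B x, then y = A work */
  VecDestroy(&work);
  return(0);
}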
695: /*@
696: PCApplyBAorABTranspose - Applies the transpose of the preconditioner
697: and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
698: NOT tr(B*A) = tr(A)*tr(B).
700: Collective on PC and Vec
702: Input Parameters:
703: + pc - the preconditioner context
704: . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
705: . x - input vector
706: - work - work vector
708: Output Parameter:
709: . y - output vector
712: Notes: This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
713: defined by B'. This is why it has the funny form of computing tr(B) * tr(A).
715: Level: developer
717: .keywords: PC, apply, operator, transpose
719: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
720: @*/
721: PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
722: {
730: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
731: VecValidValues(x,3,PETSC_TRUE);
732: if (pc->ops->applyBAtranspose) {
733: (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
734: VecValidValues(y,4,PETSC_FALSE);
735: return(0);
736: }
737: if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");
739: if (pc->setupcalled < 2) {
740: PCSetUp(pc);
741: }
743: if (side == PC_RIGHT) {
744: PCApplyTranspose(pc,x,work);
745: MatMultTranspose(pc->mat,work,y);
746: } else if (side == PC_LEFT) {
747: MatMultTranspose(pc->mat,x,work);
748: PCApplyTranspose(pc,work,y);
749: }
750: /* add support for PC_SYMMETRIC */
751: VecValidValues(y,4,PETSC_FALSE);
752: return(0);
753: }
755: /* -------------------------------------------------------------------------------*/
759: /*@
760: PCApplyRichardsonExists - Determines whether a particular preconditioner has a
761: built-in fast application of Richardson's method.
763: Not Collective
765: Input Parameter:
766: . pc - the preconditioner
768: Output Parameter:
769: . exists - PETSC_TRUE or PETSC_FALSE
771: Level: developer
773: .keywords: PC, apply, Richardson, exists
775: .seealso: PCApplyRichardson()
776: @*/
777: PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
778: {
782: if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
783: else *exists = PETSC_FALSE;
784: return(0);
785: }
789: /*@
790: PCApplyRichardson - Applies several steps of Richardson iteration with
791: the particular preconditioner. This routine is usually used by the
792: Krylov solvers and not the application code directly.
794: Collective on PC
796: Input Parameters:
797: + pc - the preconditioner context
798: . b - the right hand side
799: . w - one work vector
800: . rtol - relative decrease in residual norm convergence criteria
801: . abstol - absolute residual norm convergence criteria
802: . dtol - divergence residual norm increase criteria
803: . its - the number of iterations to apply.
804: - guesszero - if PETSC_TRUE the initial guess in y is assumed to be zero; otherwise y contains the initial guess
806: Output Parameter:
807: + outits - number of iterations actually used (for SOR this always equals its)
808: . reason - the reason the apply terminated
809: - y - the solution (also contains the initial guess if guesszero is PETSC_FALSE)
811: Notes:
812: Most preconditioners do not support this function. Use the command
813: PCApplyRichardsonExists() to determine if one does.
815: Except for the multigrid PC this routine ignores the convergence tolerances
816: and always runs for the requested number of iterations
818: Level: developer
820: .keywords: PC, apply, Richardson
822: .seealso: PCApplyRichardsonExists()
823: @*/
824: PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
825: {
833: if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
834: if (pc->setupcalled < 2) {
835: PCSetUp(pc);
836: }
837: if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
838: (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
839: return(0);
840: }
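
/*
   Example (a usage sketch): running a fixed number of Richardson sweeps with a PC that
   provides them (for instance PCSOR). The vectors b, y and the work vector w are assumed
   to match the operators already set in the PC; the tolerances are largely ignored.
*/
static PetscErrorCode ExampleRichardson(PC pc,Vec b,Vec y,Vec w)
{
  PetscBool                   exists;
  PetscInt                    outits;
  PCRichardsonConvergedReason reason;

  PCApplyRichardsonExists(pc,&exists);
  if (exists) {
    PCApplyRichardson(pc,b,y,w,1.e-8,1.e-50,1.e5,5,PETSC_TRUE,&outits,&reason); /* 5 sweeps, zero guess in y */
  }
  return(0);
}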
842: /*
843: a setupcalled of 0 indicates the PC has never been set up,
844: 1 indicates it needs to be set up again,
845: 2 indicates it is up to date.
846: */
849: /*@
850: PCSetUp - Prepares for the use of a preconditioner.
852: Collective on PC
854: Input Parameter:
855: . pc - the preconditioner context
857: Level: developer
859: .keywords: PC, setup
861: .seealso: PCCreate(), PCApply(), PCDestroy()
862: @*/
863: PetscErrorCode PCSetUp(PC pc)
864: {
866: const char *def;
870: if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");
872: if (pc->setupcalled > 1) {
873: PetscInfo(pc,"Setting PC with identical preconditioner\n");
874: return(0);
875: } else if (!pc->setupcalled) {
876: PetscInfo(pc,"Setting up new PC\n");
877: } else if (pc->flag == SAME_NONZERO_PATTERN) {
878: PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
879: } else {
880: PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
881: }
883: if (!((PetscObject)pc)->type_name) {
884: PCGetDefaultType_Private(pc,&def);
885: PCSetType(pc,def);
886: }
888: PetscLogEventBegin(PC_SetUp,pc,0,0,0);
889: if (pc->ops->setup) {
890: (*pc->ops->setup)(pc);
891: }
892: pc->setupcalled = 2;
894: PetscLogEventEnd(PC_SetUp,pc,0,0,0);
895: return(0);
896: }
900: /*@
901: PCSetUpOnBlocks - Sets up the preconditioner for each block in
902: the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
903: methods.
905: Collective on PC
907: Input Parameters:
908: . pc - the preconditioner context
910: Level: developer
912: .keywords: PC, setup, blocks
914: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
915: @*/
916: PetscErrorCode PCSetUpOnBlocks(PC pc)
917: {
922: if (!pc->ops->setuponblocks) return(0);
923: PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
924: (*pc->ops->setuponblocks)(pc);
925: PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
926: return(0);
927: }
931: /*@C
932: PCSetModifySubMatrices - Sets a user-defined routine for modifying the
933: submatrices that arise within certain subdomain-based preconditioners.
934: The basic submatrices are extracted from the preconditioner matrix as
935: usual; the user can then alter these (for example, to set different boundary
936: conditions for each submatrix) before they are used for the local solves.
938: Logically Collective on PC
940: Input Parameters:
941: + pc - the preconditioner context
942: . func - routine for modifying the submatrices
943: - ctx - optional user-defined context (may be null)
945: Calling sequence of func:
946: $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
948: . row - an array of index sets that contain the global row numbers
949: that comprise each local submatrix
950: . col - an array of index sets that contain the global column numbers
951: that comprise each local submatrix
952: . submat - array of local submatrices
953: - ctx - optional user-defined context for private data for the
954: user-defined func routine (may be null)
956: Notes:
957: PCSetModifySubMatrices() MUST be called before KSPSetUp() and
958: KSPSolve().
960: A routine set by PCSetModifySubMatrices() is currently called within
961: the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
962: preconditioners. All other preconditioners ignore this routine.
964: Level: advanced
966: .keywords: PC, set, modify, submatrices
968: .seealso: PCModifySubMatrices(), PCASMGetSubMatrices()
969: @*/
970: PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
971: {
974: pc->modifysubmatrices = func;
975: pc->modifysubmatricesP = ctx;
976: return(0);
977: }
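
/*
   Example (a usage sketch): a user routine matching the calling sequence above; it only
   reports the sizes of the local submatrices, but could modify their entries. It is then
   attached to a PC that performs subdomain solves (PCASM or PCBJACOBI).
*/
static PetscErrorCode ExampleModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{
  PetscInt i,m,n;

  for (i=0; i<nsub; i++) {
    MatGetSize(submat[i],&m,&n);
    PetscInfo3(pc,"Submatrix %D is %D by %D\n",i,m,n); /* alter submat[i] here if desired */
  }
  return(0);
}

static PetscErrorCode ExampleAttachModify(PC pc)
{
  PCSetModifySubMatrices(pc,ExampleModifySubMatrices,NULL); /* must precede KSPSetUp()/KSPSolve() */
  return(0);
}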
981: /*@C
982: PCModifySubMatrices - Calls an optional user-defined routine within
983: certain preconditioners if one has been set with PCSetModifySubMatrices().
985: Collective on PC
987: Input Parameters:
988: + pc - the preconditioner context
989: . nsub - the number of local submatrices
990: . row - an array of index sets that contain the global row numbers
991: that comprise each local submatrix
992: . col - an array of index sets that contain the global column numbers
993: that comprise each local submatrix
994: . submat - array of local submatrices
995: - ctx - optional user-defined context for private data for the
996: user-defined routine (may be null)
998: Output Parameter:
999: . submat - array of local submatrices (the entries of which may
1000: have been modified)
1002: Notes:
1003: The user should NOT generally call this routine, as it will
1004: automatically be called within certain preconditioners (currently
1005: block Jacobi, additive Schwarz) if set.
1007: The basic submatrices are extracted from the preconditioner matrix
1008: as usual; the user can then alter these (for example, to set different
1009: boundary conditions for each submatrix) before they are used for the
1010: local solves.
1012: Level: developer
1014: .keywords: PC, modify, submatrices
1016: .seealso: PCSetModifySubMatrices()
1017: @*/
1018: PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1019: {
1024: if (!pc->modifysubmatrices) return(0);
1025: PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
1026: (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
1027: PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
1028: return(0);
1029: }
1033: /*@
1034: PCSetOperators - Sets the matrix associated with the linear system and
1035: a (possibly) different one associated with the preconditioner.
1037: Logically Collective on PC and Mat
1039: Input Parameters:
1040: + pc - the preconditioner context
1041: . Amat - the matrix that defines the linear system
1042: . Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1043: - flag - flag indicating information about the preconditioner matrix structure
1044: during successive linear solves. This flag is ignored the first time a
1045: linear system is solved, and thus is irrelevant when solving just one linear
1046: system.
1048: Notes:
1049: The flag can be used to eliminate unnecessary work in the preconditioner
1050: during the repeated solution of linear systems of the same size. The
1051: available options are
1052: + SAME_PRECONDITIONER -
1053: Pmat is identical during successive linear solves.
1054: This option is intended for folks who are using
1055: different Amat and Pmat matrices and wish to reuse the
1056: same preconditioner matrix. For example, this option
1057: saves work by not recomputing incomplete factorization
1058: for ILU/ICC preconditioners.
1059: . SAME_NONZERO_PATTERN -
1060: Pmat has the same nonzero structure during
1061: successive linear solves.
1062: - DIFFERENT_NONZERO_PATTERN -
1063: Pmat does not have the same nonzero structure.
1065: Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1067: If you wish to replace either Amat or Pmat but leave the other one untouched then
1068: first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1069: on it and then pass it back in your call to KSPSetOperators().
1071: Caution:
1072: If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
1073: and does not check the structure of the matrix. If you erroneously
1074: claim that the structure is the same when it actually is not, the new
1075: preconditioner will not function correctly. Thus, use this optimization
1076: feature carefully!
1078: If in doubt about whether your preconditioner matrix has changed
1079: structure or not, use the flag DIFFERENT_NONZERO_PATTERN.
1081: More Notes about Repeated Solution of Linear Systems:
1082: PETSc does NOT reset the matrix entries of either Amat or Pmat
1083: to zero after a linear solve; the user is completely responsible for
1084: matrix assembly. See the routine MatZeroEntries() if desiring to
1085: zero all elements of a matrix.
1087: Level: intermediate
1089: .keywords: PC, set, operators, matrix, linear system
1091: .seealso: PCGetOperators(), MatZeroEntries()
1092: @*/
1093: PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
1094: {
1096: PetscInt m1,n1,m2,n2;
1104: if (pc->setupcalled && Amat && Pmat) {
1105: MatGetLocalSize(Amat,&m1,&n1);
1106: MatGetLocalSize(pc->mat,&m2,&n2);
1107: if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1108: MatGetLocalSize(Pmat,&m1,&n1);
1109: MatGetLocalSize(pc->pmat,&m2,&n2);
1110: if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1111: }
1113: /* reference first in case the matrices are the same */
1114: if (Amat) {PetscObjectReference((PetscObject)Amat);}
1115: MatDestroy(&pc->mat);
1116: if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
1117: MatDestroy(&pc->pmat);
1118: pc->mat = Amat;
1119: pc->pmat = Pmat;
1121: if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
1122: pc->setupcalled = 1;
1123: }
1124: pc->flag = flag;
1125: return(0);
1126: }
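
/*
   Example (a usage sketch): replacing only the preconditioner matrix while keeping the
   current Amat, following the pattern in the notes above. The PC is assumed to already
   have operators set, and Pnew to be an assembled matrix of the same size.
*/
static PetscErrorCode ExampleReplacePmat(PC pc,Mat Pnew)
{
  Mat Amat;

  PCGetOperators(pc,&Amat,NULL,NULL);        /* does not increase the reference count */
  PetscObjectReference((PetscObject)Amat);   /* keep Amat alive while the PC swaps matrices */
  PCSetOperators(pc,Amat,Pnew,DIFFERENT_NONZERO_PATTERN);
  PetscObjectDereference((PetscObject)Amat); /* drop the temporary reference */
  return(0);
}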
1130: /*@C
1131: PCGetOperators - Gets the matrix associated with the linear system and
1132: possibly a different one associated with the preconditioner.
1134: Not collective, though parallel Mats are returned if the PC is parallel
1136: Input Parameter:
1137: . pc - the preconditioner context
1139: Output Parameters:
1140: + Amat - the matrix defining the linear system
1141: . Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1142: - flag - flag indicating information about the preconditioner
1143: matrix structure. See PCSetOperators() for details.
1145: Level: intermediate
1147: Notes: Does not increase the reference count of the matrices, so you should not destroy them
1149: Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1150: are created in PC and returned to the user. In this case, if both operators
1151: mat and pmat are requested, two DIFFERENT operators will be returned. If
1152: only one is requested both operators in the PC will be the same (i.e. as
1153: if one had called KSP/PCSetOperators() with the same argument for both Mats).
1154: The user must set the sizes of the returned matrices and their type etc just
1155: as if the user created them with MatCreate(). For example,
1157: $ KSP/PCGetOperators(ksp/pc,&Amat,NULL,NULL); is equivalent to
1158: $ set size, type, etc of Amat
1160: $ MatCreate(comm,&Amat);
1161: $ KSP/PCSetOperators(ksp/pc,Amat,Amat,SAME_NONZERO_PATTERN);
1162: $ PetscObjectDereference((PetscObject)Amat);
1163: $ set size, type, etc of Amat
1165: and
1167: $ KSP/PCGetOperators(ksp/pc,&Amat,&Pmat,NULL); is equivalent to
1168: $ set size, type, etc of Amat and Pmat
1170: $ MatCreate(comm,&Amat);
1171: $ MatCreate(comm,&Pmat);
1172: $ KSP/PCSetOperators(ksp/pc,Amat,Pmat,SAME_NONZERO_PATTERN);
1173: $ PetscObjectDereference((PetscObject)Amat);
1174: $ PetscObjectDereference((PetscObject)Pmat);
1175: $ set size, type, etc of Amat and Pmat
1177: The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
1178: of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1179: managed by the topmost object (i.e. the TS, SNES, or KSP). Another way to look
1180: at this is when you create a SNES you do not NEED to create a KSP and attach it to
1181: the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1182: you do not need to attach a PC to it (the KSP object manages the PC object for you).
1183: Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1184: it can be created for you?
1187: .keywords: PC, get, operators, matrix, linear system
1189: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1190: @*/
1191: PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat,MatStructure *flag)
1192: {
1197: if (Amat) {
1198: if (!pc->mat) {
1199: if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use it for Amat */
1200: pc->mat = pc->pmat;
1201: PetscObjectReference((PetscObject)pc->mat);
1202: } else { /* both Amat and Pmat are empty */
1203: MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);
1204: if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
1205: pc->pmat = pc->mat;
1206: PetscObjectReference((PetscObject)pc->pmat);
1207: }
1208: }
1209: }
1210: *Amat = pc->mat;
1211: }
1212: if (Pmat) {
1213: if (!pc->pmat) {
1214: if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
1215: pc->pmat = pc->mat;
1216: PetscObjectReference((PetscObject)pc->pmat);
1217: } else {
1218: MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);
1219: if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
1220: pc->mat = pc->pmat;
1221: PetscObjectReference((PetscObject)pc->mat);
1222: }
1223: }
1224: }
1225: *Pmat = pc->pmat;
1226: }
1227: if (flag) *flag = pc->flag;
1228: return(0);
1229: }
1233: /*@C
1234: PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1235: possibly a different one associated with the preconditioner have been set in the PC.
1237: Not collective, though the results on all processes should be the same
1239: Input Parameter:
1240: . pc - the preconditioner context
1242: Output Parameters:
1243: + mat - the matrix associated with the linear system was set
1244: - pmat - the matrix associated with the preconditioner was set, usually the same as mat
1246: Level: intermediate
1248: .keywords: PC, get, operators, matrix, linear system
1250: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1251: @*/
1252: PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
1253: {
1256: if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
1257: if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1258: return(0);
1259: }
1263: /*@
1264: PCFactorGetMatrix - Gets the factored matrix from the
1265: preconditioner context. This routine is valid only for the LU,
1266: incomplete LU, Cholesky, and incomplete Cholesky methods.
1268: Not Collective on PC though Mat is parallel if PC is parallel
1270: Input Parameters:
1271: . pc - the preconditioner context
1273: Output parameters:
1274: . mat - the factored matrix
1276: Level: advanced
1278: Notes: Does not increase the reference count for the matrix so DO NOT destroy it
1280: .keywords: PC, get, factored, matrix
1281: @*/
1282: PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
1283: {
1289: if (pc->ops->getfactoredmatrix) {
1290: (*pc->ops->getfactoredmatrix)(pc,mat);
1291: } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1292: return(0);
1293: }
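
/*
   Example (a usage sketch): retrieving the factored matrix from an LU preconditioner and
   querying its fill with MatGetInfo(). The returned matrix must not be destroyed.
*/
static PetscErrorCode ExampleGetFactor(PC pc)
{
  Mat     F;
  MatInfo info;

  PCSetType(pc,PCLU);
  PCSetUp(pc);                   /* the factorization happens here */
  PCFactorGetMatrix(pc,&F);
  MatGetInfo(F,MAT_LOCAL,&info); /* info.nz_used reflects the fill in the factor */
  PetscInfo1(pc,"Factor has %g nonzeros\n",info.nz_used);
  return(0);
}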
1297: /*@C
1298: PCSetOptionsPrefix - Sets the prefix used for searching for all
1299: PC options in the database.
1301: Logically Collective on PC
1303: Input Parameters:
1304: + pc - the preconditioner context
1305: - prefix - the prefix string to prepend to all PC option requests
1307: Notes:
1308: A hyphen (-) must NOT be given at the beginning of the prefix name.
1309: The first character of all runtime options is AUTOMATICALLY the
1310: hyphen.
1312: Level: advanced
1314: .keywords: PC, set, options, prefix, database
1316: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1317: @*/
1318: PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
1319: {
1324: PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);
1325: return(0);
1326: }
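
/*
   Example (a usage sketch): giving a PC its own options prefix so it can be configured
   separately on the command line, e.g. with -my_pc_type ilu.
*/
static PetscErrorCode ExampleSetPrefix(PC pc)
{
  PCSetOptionsPrefix(pc,"my_"); /* note: no leading hyphen in the prefix */
  PCSetFromOptions(pc);         /* now reads -my_pc_type etc. */
  return(0);
}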
1330: /*@C
1331: PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1332: PC options in the database.
1334: Logically Collective on PC
1336: Input Parameters:
1337: + pc - the preconditioner context
1338: - prefix - the prefix string to prepend to all PC option requests
1340: Notes:
1341: A hyphen (-) must NOT be given at the beginning of the prefix name.
1342: The first character of all runtime options is AUTOMATICALLY the
1343: hyphen.
1345: Level: advanced
1347: .keywords: PC, append, options, prefix, database
1349: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1350: @*/
1351: PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
1352: {
1357: PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);
1358: return(0);
1359: }
1363: /*@C
1364: PCGetOptionsPrefix - Gets the prefix used for searching for all
1365: PC options in the database.
1367: Not Collective
1369: Input Parameters:
1370: . pc - the preconditioner context
1372: Output Parameters:
1373: . prefix - pointer to the prefix string used, is returned
1375: Notes: On the Fortran side, the user should pass in a string 'prefix' of
1376: sufficient length to hold the prefix.
1378: Level: advanced
1380: .keywords: PC, get, options, prefix, database
1382: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1383: @*/
1384: PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
1385: {
1391: PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);
1392: return(0);
1393: }
1397: /*@
1398: PCPreSolve - Optional pre-solve phase, intended for any
1399: preconditioner-specific actions that must be performed before
1400: the iterative solve itself.
1402: Collective on PC
1404: Input Parameters:
1405: + pc - the preconditioner context
1406: - ksp - the Krylov subspace context
1408: Level: developer
1410: Sample of Usage:
1411: .vb
1412: PCPreSolve(pc,ksp);
1413: KSPSolve(ksp,b,x);
1414: PCPostSolve(pc,ksp);
1415: .ve
1417: Notes:
1418: The pre-solve phase is distinct from the PCSetUp() phase.
1420: KSPSolve() calls this directly, so it is rarely called by the user.
1422: .keywords: PC, pre-solve
1424: .seealso: PCPostSolve()
1425: @*/
1426: PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1427: {
1429: Vec x,rhs;
1434: pc->presolvedone++;
1435: if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1436: KSPGetSolution(ksp,&x);
1437: KSPGetRhs(ksp,&rhs);
1439: if (pc->ops->presolve) {
1440: (*pc->ops->presolve)(pc,ksp,rhs,x);
1441: }
1442: return(0);
1443: }
1447: /*@
1448: PCPostSolve - Optional post-solve phase, intended for any
1449: preconditioner-specific actions that must be performed after
1450: the iterative solve itself.
1452: Collective on PC
1454: Input Parameters:
1455: + pc - the preconditioner context
1456: - ksp - the Krylov subspace context
1458: Sample of Usage:
1459: .vb
1460: PCPreSolve(pc,ksp);
1461: KSPSolve(ksp,b,x);
1462: PCPostSolve(pc,ksp);
1463: .ve
1465: Note:
1466: KSPSolve() calls this routine directly, so it is rarely called by the user.
1468: Level: developer
1470: .keywords: PC, post-solve
1472: .seealso: PCPreSolve(), KSPSolve()
1473: @*/
1474: PetscErrorCode PCPostSolve(PC pc,KSP ksp)
1475: {
1477: Vec x,rhs;
1482: pc->presolvedone--;
1483: KSPGetSolution(ksp,&x);
1484: KSPGetRhs(ksp,&rhs);
1485: if (pc->ops->postsolve) {
1486: (*pc->ops->postsolve)(pc,ksp,rhs,x);
1487: }
1488: return(0);
1489: }
1493: /*@C
1494: PCLoad - Loads a PC that has been stored in binary with PCView().
1496: Collective on PetscViewer
1498: Input Parameters:
1499: + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1500: some related function before a call to PCLoad().
1501: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1503: Level: intermediate
1505: Notes:
1506: The type is determined by the data in the file, any type set into the PC before this call is ignored.
1508: Notes for advanced users:
1509: Most users should not need to know the details of the binary storage
1510: format, since PCLoad() and PCView() completely hide these details.
1511: But for anyone who's interested, the standard binary PC storage
1512: format is
1513: .vb
1514: has not yet been determined
1515: .ve
1517: .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1518: @*/
1519: PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
1520: {
1522: PetscBool isbinary;
1523: PetscInt classid;
1524: char type[256];
1529: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1530: if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1532: PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);
1533: if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1534: PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);
1535: PCSetType(newdm, type);
1536: if (newdm->ops->load) {
1537: (*newdm->ops->load)(newdm,viewer);
1538: }
1539: return(0);
1540: }
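
/*
   Example (a usage sketch): loading a PC previously stored with PCView() into a binary
   viewer. The file name "pc.bin" is only illustrative.
*/
static PetscErrorCode ExampleLoadPC(MPI_Comm comm,PC *pc)
{
  PetscViewer viewer;

  PetscViewerBinaryOpen(comm,"pc.bin",FILE_MODE_READ,&viewer);
  PCCreate(comm,pc);
  PCLoad(*pc,viewer);          /* the type stored in the file overrides any type set so far */
  PetscViewerDestroy(&viewer);
  return(0);
}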
1542: #include <petscdraw.h>
1543: #if defined(PETSC_HAVE_AMS)
1544: #include <petscviewerams.h>
1545: #endif
1548: /*@C
1549: PCView - Prints the PC data structure.
1551: Collective on PC
1553: Input Parameters:
1554: + pc - the PC context
1555: - viewer - optional visualization context
1557: Note:
1558: The available visualization contexts include
1559: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
1560: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1561: output where only the first processor opens
1562: the file. All other processors send their
1563: data to the first processor to print.
1565: The user can open an alternative visualization context with
1566: PetscViewerASCIIOpen() (output to a specified file).
1568: Level: developer
1570: .keywords: PC, view
1572: .seealso: KSPView(), PetscViewerASCIIOpen()
1573: @*/
1574: PetscErrorCode PCView(PC pc,PetscViewer viewer)
1575: {
1576: PCType cstr;
1577: PetscErrorCode ierr;
1578: PetscBool iascii,isstring,isbinary,isdraw;
1579: PetscViewerFormat format;
1580: #if defined(PETSC_HAVE_AMS)
1581: PetscBool isams;
1582: #endif
1586: if (!viewer) {
1587: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);
1588: }
1592: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1593: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
1594: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1595: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
1596: #if defined(PETSC_HAVE_AMS)
1597: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERAMS,&isams);
1598: #endif
1600: if (iascii) {
1601: PetscViewerGetFormat(viewer,&format);
1602: PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer,"PC Object");
1603: if (!pc->setupcalled) {
1604: PetscViewerASCIIPrintf(viewer," PC has not been set up so information may be incomplete\n");
1605: }
1606: if (pc->ops->view) {
1607: PetscViewerASCIIPushTab(viewer);
1608: (*pc->ops->view)(pc,viewer);
1609: PetscViewerASCIIPopTab(viewer);
1610: }
1611: if (pc->mat) {
1612: PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1613: if (pc->pmat == pc->mat) {
1614: PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");
1615: PetscViewerASCIIPushTab(viewer);
1616: MatView(pc->mat,viewer);
1617: PetscViewerASCIIPopTab(viewer);
1618: } else {
1619: if (pc->pmat) {
1620: PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");
1621: } else {
1622: PetscViewerASCIIPrintf(viewer," linear system matrix:\n");
1623: }
1624: PetscViewerASCIIPushTab(viewer);
1625: MatView(pc->mat,viewer);
1626: if (pc->pmat) {MatView(pc->pmat,viewer);}
1627: PetscViewerASCIIPopTab(viewer);
1628: }
1629: PetscViewerPopFormat(viewer);
1630: }
1631: } else if (isstring) {
1632: PCGetType(pc,&cstr);
1633: PetscViewerStringSPrintf(viewer," %-7.7s",cstr);
1634: if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1635: } else if (isbinary) {
1636: PetscInt classid = PC_FILE_CLASSID;
1637: MPI_Comm comm;
1638: PetscMPIInt rank;
1639: char type[256];
1641: PetscObjectGetComm((PetscObject)pc,&comm);
1642: MPI_Comm_rank(comm,&rank);
1643: if (!rank) {
1644: PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT,PETSC_FALSE);
1645: PetscStrncpy(type,((PetscObject)pc)->type_name,256);
1646: PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR,PETSC_FALSE);
1647: }
1648: if (pc->ops->view) {
1649: (*pc->ops->view)(pc,viewer);
1650: }
1651: } else if (isdraw) {
1652: PetscDraw draw;
1653: char str[25];
1654: PetscReal x,y,bottom,h;
1655: PetscInt n;
1657: PetscViewerDrawGetDraw(viewer,0,&draw);
1658: PetscDrawGetCurrentPoint(draw,&x,&y);
1659: if (pc->mat) {
1660: MatGetSize(pc->mat,&n,NULL);
1661: PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);
1662: } else {
1663: PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);
1664: }
1665: PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
1666: bottom = y - h;
1667: PetscDrawPushCurrentPoint(draw,x,bottom);
1668: if (pc->ops->view) {
1669: (*pc->ops->view)(pc,viewer);
1670: }
1671: PetscDrawPopCurrentPoint(draw);
1672: #if defined(PETSC_HAVE_AMS)
1673: } else if (isams) {
1674: if (((PetscObject)pc)->amsmem == -1) {
1675: PetscObjectViewAMS((PetscObject)pc,viewer);
1676: }
1677: if (pc->mat) {MatView(pc->mat,viewer);}
1678: if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1679: #endif
1680: }
1681: return(0);
1682: }
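
/*
   Example (a usage sketch): printing a PC to standard output with the default ASCII viewer.
*/
static PetscErrorCode ExampleViewPC(PC pc)
{
  PCView(pc,PETSC_VIEWER_STDOUT_WORLD); /* most informative after PCSetUp() has been called */
  return(0);
}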
1687: /*@
1688: PCSetInitialGuessNonzero - Tells the iterative solver that the
1689: initial guess is nonzero; otherwise PC assumes the initial guess
1690: is to be zero (and thus zeros it out before solving).
1692: Logically Collective on PC
1694: Input Parameters:
1695: + pc - iterative context obtained from PCCreate()
1696: - flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero
1698: Level: developer
1700: Notes:
1701: This is a weird function. Since PCs are linear operators on the right hand side they
1702: CANNOT use an initial guess. This function is for the "pass-through" preconditioners
1703: PCKSP, PCREDUNDANT and PCHMPI and causes the inner KSP object to use the nonzero
1704: initial guess. It is not currently working for PCREDUNDANT, which has to be rewritten to use the inner KSP.
1707: .keywords: PC, set, initial guess, nonzero
1709: .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
1710: @*/
1711: PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg)
1712: {
1715: pc->nonzero_guess = flg;
1716: return(0);
1717: }
1721: /*@C
1722: PCRegister - Adds a method to the preconditioner package.
1724: Not collective
1726: Input Parameters:
1727: + name_solver - name of a new user-defined solver
1728: - routine_create - routine to create method context
1730: Notes:
1731: PCRegister() may be called multiple times to add several user-defined preconditioners.
1733: Sample usage:
1734: .vb
1735: PCRegister("my_solver", MySolverCreate);
1736: .ve
1738: Then, your solver can be chosen with the procedural interface via
1739: $ PCSetType(pc,"my_solver")
1740: or at runtime via the option
1741: $ -pc_type my_solver
1743: Level: advanced
1745: .keywords: PC, register
1747: .seealso: PCRegisterAll(), PCRegisterDestroy()
1748: @*/
1749: PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
1750: {
1754: PetscFunctionListAdd(&PCList,sname,function);
1755: return(0);
1756: }
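
/*
   Example (a usage sketch): registering a hypothetical user-defined preconditioner type.
   The creation routine fills in the PC function table, which requires the private header
   petsc-private/pcimpl.h (already included at the top of this file).
*/
static PetscErrorCode MySolverApply_Example(PC pc,Vec x,Vec y)
{
  VecCopy(x,y); /* placeholder action: behaves like PCNONE */
  return(0);
}

static PetscErrorCode MySolverCreate_Example(PC pc)
{
  pc->ops->apply = MySolverApply_Example;
  return(0);
}

static PetscErrorCode ExampleRegister(void)
{
  PCRegister("my_solver",MySolverCreate_Example); /* afterwards -pc_type my_solver selects it */
  return(0);
}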
1760: /*@
1761: PCComputeExplicitOperator - Computes the explicit preconditioned operator.
1763: Collective on PC
1765: Input Parameter:
1766: . pc - the preconditioner object
1768: Output Parameter:
1769: . mat - the explicit preconditioned operator
1771: Notes:
1772: This computation is done by applying the operators to columns of the
1773: identity matrix.
1775: Currently, this routine uses a dense matrix format when 1 processor
1776: is used and a sparse format otherwise. This routine is costly in general,
1777: and is recommended for use only with relatively small systems.
1779: Level: advanced
1781: .keywords: PC, compute, explicit, operator
1783: .seealso: KSPComputeExplicitOperator()
1785: @*/
1786: PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat)
1787: {
1788: Vec in,out;
1790: PetscInt i,M,m,*rows,start,end;
1791: PetscMPIInt size;
1792: MPI_Comm comm;
1793: PetscScalar *array,one = 1.0;
1799: PetscObjectGetComm((PetscObject)pc,&comm);
1800: MPI_Comm_size(comm,&size);
1802: if (!pc->pmat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call");
1803: MatGetVecs(pc->pmat,&in,0);
1804: VecDuplicate(in,&out);
1805: VecGetOwnershipRange(in,&start,&end);
1806: VecGetSize(in,&M);
1807: VecGetLocalSize(in,&m);
1808: PetscMalloc((m+1)*sizeof(PetscInt),&rows);
1809: for (i=0; i<m; i++) rows[i] = start + i;
1811: MatCreate(comm,mat);
1812: MatSetSizes(*mat,m,m,M,M);
1813: if (size == 1) {
1814: MatSetType(*mat,MATSEQDENSE);
1815: MatSeqDenseSetPreallocation(*mat,NULL);
1816: } else {
1817: MatSetType(*mat,MATMPIAIJ);
1818: MatMPIAIJSetPreallocation(*mat,0,NULL,0,NULL);
1819: }
1821: for (i=0; i<M; i++) {
1823: VecSet(in,0.0);
1824: VecSetValues(in,1,&i,&one,INSERT_VALUES);
1825: VecAssemblyBegin(in);
1826: VecAssemblyEnd(in);
1828: /* should fix, allowing user to choose side */
1829: PCApply(pc,in,out);
1831: VecGetArray(out,&array);
1832: MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);
1833: VecRestoreArray(out,&array);
1835: }
1836: PetscFree(rows);
1837: VecDestroy(&out);
1838: MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);
1839: MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);
1840: return(0);
1841: }
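
/*
   Example (a usage sketch): forming the explicit representation of the preconditioner and
   viewing it; intended only for debugging small problems.
*/
static PetscErrorCode ExampleExplicitOperator(PC pc)
{
  Mat B;

  PCComputeExplicitOperator(pc,&B);
  MatView(B,PETSC_VIEWER_STDOUT_WORLD);
  MatDestroy(&B);
  return(0);
}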
1845: /*@
1846: PCSetCoordinates - sets the coordinates of all the nodes on the local process
1848: Collective on PC
1850: Input Parameters:
1851: + pc - the solver context
1852: . dim - the dimension of the coordinates, 1, 2, or 3
1853: - coords - the coordinates (an array of length dim*nloc, where nloc is the number of local nodes)
1855: Level: intermediate
1857: Notes: coords is an array of the 3D coordinates for the nodes on
1858: the local processor. So if there are 108 equations on a processor
1859: for a displacement finite element discretization of elasticity (so
1860: that there are 36 = 108/3 nodes) then the array must have 108
1861: double precision values (i.e., 3 * 36). These x y z coordinates
1862: should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1863: ... , N-1.z ].
1865: .seealso: MatSetNearNullSpace
1866: @*/
1867: PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
1868: {
1872: PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1873: return(0);
1874: }
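
/*
   Example (a usage sketch): passing nodal coordinates to a PC (used, for instance, by the
   aggregation-based multigrid for elasticity). The array coords is assumed to hold the
   interleaved x,y,z values for the nloc local nodes, as described in the notes above.
*/
static PetscErrorCode ExampleSetCoordinates(PC pc,PetscInt nloc,PetscReal *coords)
{
  PCSetCoordinates(pc,3,nloc,coords); /* 3 coordinates per node, nloc local nodes */
  return(0);
}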