Actual source code: itfunc.c
1: /*
2: Interface KSP routines that the user calls.
3: */
5: #include <petsc/private/kspimpl.h>
6: #include <petsc/private/matimpl.h>
7: #include <petscdm.h>
9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
10: static PetscInt level = 0;
/* View `obj` on `viewer` using `format`, restoring the viewer's previous format afterwards */
static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
{
  PetscCall(PetscViewerPushFormat(viewer, format));
  PetscCall(PetscObjectView(obj, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  return PETSC_SUCCESS;
}
20: /*@
21: KSPComputeExtremeSingularValues - Computes the extreme singular values
22: for the preconditioned operator. Called after or during `KSPSolve()`.
24: Not Collective
26: Input Parameter:
27: . ksp - iterative solver obtained from `KSPCreate()`
29: Output Parameters:
30: + emax - maximum estimated singular value
31: - emin - minimum estimated singular value
33: Options Database Key:
34: . -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.
36: Level: advanced
38: Notes:
39: One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
40: (or use the option `-ksp_view_singularvalues`) in order for this routine to work correctly.
42: Many users may just want to use the monitoring routine
43: `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
44: to print the extreme singular values at each iteration of the linear solve.
46: Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
47: The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
48: intended for eigenanalysis. Consider the excellent package SLEPc if accurate values are required.
50: Disable restarts if using `KSPGMRES`, otherwise this estimate will only be using those iterations after the last
51: restart. See `KSPGMRESSetRestart()` for more details.
53: .seealso: [](ch_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`, `KSPComputeRitz()`
54: @*/
55: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
56: {
57: PetscFunctionBegin;
59: PetscAssertPointer(emax, 2);
60: PetscAssertPointer(emin, 3);
61: PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");
63: if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
64: else {
65: *emin = -1.0;
66: *emax = -1.0;
67: }
68: PetscFunctionReturn(PETSC_SUCCESS);
69: }
71: /*@
72: KSPComputeEigenvalues - Computes the extreme eigenvalues for the
73: preconditioned operator. Called after or during `KSPSolve()`.
75: Not Collective
77: Input Parameters:
78: + ksp - iterative solver obtained from `KSPCreate()`
79: - n - size of arrays `r` and `c`. The number of eigenvalues computed `neig` will, in general, be less than this.
81: Output Parameters:
82: + r - real part of computed eigenvalues, provided by user with a dimension of at least `n`
83: . c - complex part of computed eigenvalues, provided by user with a dimension of at least `n`
84: - neig - actual number of eigenvalues computed (will be less than or equal to `n`)
86: Options Database Key:
87: . -ksp_view_eigenvalues - Prints eigenvalues to stdout
89: Level: advanced
91: Notes:
92: The number of eigenvalues estimated depends on the size of the Krylov space
93: generated during the `KSPSolve()` ; for example, with
94: `KSPCG` it corresponds to the number of CG iterations, for `KSPGMRES` it is the number
95: of GMRES iterations SINCE the last restart. Any extra space in `r` and `c`
96: will be ignored.
98: `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
99: intended only for assistance in understanding the convergence of iterative
100: methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
101: the excellent package SLEPc.
103: One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
104: in order for this routine to work correctly.
106: Many users may just want to use the monitoring routine
107: `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
108: to print the singular values at each iteration of the linear solve.
110: `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.
112: .seealso: [](ch_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
113: @*/
114: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
115: {
116: PetscFunctionBegin;
118: if (n) PetscAssertPointer(r, 3);
119: if (n) PetscAssertPointer(c, 4);
120: PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
121: PetscAssertPointer(neig, 5);
122: PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");
124: if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
125: else *neig = 0;
126: PetscFunctionReturn(PETSC_SUCCESS);
127: }
129: /*@
130: KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the
131: smallest or largest in modulus, for the preconditioned operator.
133: Not Collective
135: Input Parameters:
136: + ksp - iterative solver obtained from `KSPCreate()`
137: . ritz - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
138: - small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively
140: Output Parameters:
141: + nrit - On input number of (harmonic) Ritz pairs to compute; on output, actual number of computed (harmonic) Ritz pairs
142: . S - an array of the Ritz vectors, pass in an array of vectors of size `nrit`
143: . tetar - real part of the Ritz values, pass in an array of size `nrit`
144: - tetai - imaginary part of the Ritz values, pass in an array of size `nrit`
146: Level: advanced
148: Notes:
149: This only works with a `KSPType` of `KSPGMRES`.
151: One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.
153: This routine must be called after `KSPSolve()`.
155: In `KSPGMRES`, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
156: the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
157: a restart (that is a complete GMRES cycle was never achieved).
159: The number of actual (harmonic) Ritz pairs computed is less than or equal to the restart
160: parameter for GMRES if a complete cycle has been performed or less or equal to the number of GMRES
161: iterations.
163: `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).
165: For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
166: the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
167: vectors `S` are equal to the real and the imaginary parts of the associated vectors.
168: When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
169: values are still returned in `tetar` and `tetai`, as is done in `KSPComputeEigenvalues()`, but
170: the Ritz vectors S are complex.
172: The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.
174: The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator, consider the
175: excellent package SLEPc if accurate values are required.
177: .seealso: [](ch_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
178: @*/
PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
{
  PetscFunctionBegin;
  /* KSPSetComputeRitz() must have been called before KSPSetUp() */
  PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
  /* Try: only some KSP types implement this (per the docs above, GMRES); otherwise this is a no-op */
  PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
  PetscFunctionReturn(PETSC_SUCCESS);
}
188: /*@
189: KSPSetUpOnBlocks - Sets up the preconditioner for each block in
190: the block Jacobi `PCJACOBI`, overlapping Schwarz `PCASM`, and fieldsplit `PCFIELDSPLIT` preconditioners
192: Collective
194: Input Parameter:
195: . ksp - the `KSP` context
197: Level: advanced
199: Notes:
200: `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
201: more precise profiling (via `-log_view`) of the setup phase for these
202: block preconditioners. If the user does not call `KSPSetUpOnBlocks()`,
203: it will automatically be called from within `KSPSolve()`.
205: Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
206: on the `PC` context within the `KSP` context.
208: .seealso: [](ch_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
209: @*/
PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
{
  PC             pc;
  PCFailedReason pcreason;

  PetscFunctionBegin;
  /* bump the KSPSetUp()/KSPSolve() nesting counter for the duration of the block setup */
  level++;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetUpOnBlocks(pc));
  PetscCall(PCGetFailedReason(pc, &pcreason));
  level--;
  /*
     This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
     this flag and initializing an appropriate vector with VecFlag() so that the first norm computation can
     produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
     terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
  */
  if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
232: /*@
233: KSPSetReusePreconditioner - reuse the current preconditioner for future `KSPSolve()`, do not construct a new preconditioner even if the `Mat` operator
234: in the `KSP` has different values
236: Collective
238: Input Parameters:
239: + ksp - iterative solver obtained from `KSPCreate()`
240: - flag - `PETSC_TRUE` to reuse the current preconditioner, or `PETSC_FALSE` to construct a new preconditioner
242: Options Database Key:
243: . -ksp_reuse_preconditioner (true|false) - reuse the previously computed preconditioner
245: Level: intermediate
247: Notes:
248: When using `SNES` one can use `SNESSetLagPreconditioner()` to determine when preconditioners are reused.
250: Reusing the preconditioner reduces the time needed to form new preconditioners but may (significantly) increase the number
251: of iterations needed for future solves depending on how much the matrix entries have changed.
253: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGetReusePreconditioner()`,
254: `SNESSetLagPreconditioner()`, `SNES`
255: @*/
PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
{
  PC pc;

  PetscFunctionBegin;
  /* simply forward the flag to the inner PC (created on demand by KSPGetPC()) */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetReusePreconditioner(pc, flag));
  PetscFunctionReturn(PETSC_SUCCESS);
}
267: /*@
268: KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the `Mat` operator in the `KSP` has changed.
270: Collective
272: Input Parameter:
273: . ksp - iterative solver obtained from `KSPCreate()`
275: Output Parameter:
276: . flag - the boolean flag indicating if the current preconditioner should be reused
278: Level: intermediate
280: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
281: @*/
282: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
283: {
284: PetscFunctionBegin;
286: PetscAssertPointer(flag, 2);
287: *flag = PETSC_FALSE;
288: if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
289: PetscFunctionReturn(PETSC_SUCCESS);
290: }
292: /*@
293: KSPSetSkipPCSetFromOptions - prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`.
294: This is used if the same `PC` is shared by more than one `KSP` so its options are not reset for each `KSP`
296: Collective
298: Input Parameters:
299: + ksp - iterative solver obtained from `KSPCreate()`
300: - flag - `PETSC_TRUE` to skip calling the `PCSetFromOptions()`
302: Level: developer
304: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
305: @*/
PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
{
  PetscFunctionBegin;
  /* record the flag; consulted by KSPSetFromOptions() when deciding whether to call PCSetFromOptions() */
  ksp->skippcsetfromoptions = flag;
  PetscFunctionReturn(PETSC_SUCCESS);
}
314: /*@
315: KSPSetUp - Sets up the internal data structures for the
316: later use `KSPSolve()` the `KSP` linear iterative solver.
318: Collective
320: Input Parameter:
321: . ksp - iterative solver, `KSP`, obtained from `KSPCreate()`
323: Level: developer
325: Note:
326: This is called automatically by `KSPSolve()` so usually does not need to be called directly.
328: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPSetUpOnBlocks()`
329: @*/
PetscErrorCode KSPSetUp(KSP ksp)
{
  Mat            A, B;
  Mat            mat, pmat;
  MatNullSpace   nullsp;
  PCFailedReason pcreason;
  PC             pc;
  PetscBool      pcmpi;

  PetscFunctionBegin;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCMPI, &pcmpi));
  if (pcmpi) {
    /* with PCMPI the outer KSP must only apply the preconditioner once, so force KSPPREONLY */
    PetscBool ksppreonly;
    PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPPREONLY, &ksppreonly));
    if (!ksppreonly) PetscCall(KSPSetType(ksp, KSPPREONLY));
  }
  level++; /* nesting counter for KSPSetUp()/KSPSolve(); decremented on every exit path below */

  /* reset the convergence flag from the previous solves */
  ksp->reason = KSP_CONVERGED_ITERATING;

  if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES)); /* GMRES is the default solver type */
  PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));

  if ((ksp->dmActive & KSP_DMACTIVE_OPERATOR) && !ksp->setupstage) {
    /* first time in so build matrix and vector data structures using DM */
    if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
    if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
    PetscCall(DMCreateMatrix(ksp->dm, &A));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(PetscObjectDereference((PetscObject)A)); /* KSPSetOperators() took a reference */
  }

  if (ksp->dmActive) {
    DMKSP kdm;
    PetscCall(DMGetDMKSP(ksp->dm, &kdm));

    if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS && (ksp->dmActive & KSP_DMACTIVE_INITIAL_GUESS)) {
      /* only computes initial guess the first time through */
      PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
      PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
    }
    if (kdm->ops->computerhs && (ksp->dmActive & KSP_DMACTIVE_RHS)) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));
    if ((ksp->setupstage != KSP_SETUP_NEWRHS) && (ksp->dmActive & KSP_DMACTIVE_OPERATOR)) {
      PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp, KSP_DMACTIVE_ALL, PETSC_FALSE);");
      PetscCall(KSPGetOperators(ksp, &A, &B));
      PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
    }
  }

  /* only a new right-hand side: the solver and preconditioner are already set up */
  if (ksp->setupstage == KSP_SETUP_NEWRHS) {
    level--;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));

  switch (ksp->setupstage) {
  case KSP_SETUP_NEW:
    PetscUseTypeMethod(ksp, setup);
    break;
  case KSP_SETUP_NEWMATRIX: /* This should be replaced with a more general mechanism */
    if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
    break;
  default:
    break;
  }

  if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  /* scale the matrix if requested */
  if (ksp->dscale) {
    PetscScalar *xx;
    PetscInt     i, n;
    PetscBool    zeroflag = PETSC_FALSE;

    if (!ksp->diagonal) { /* allocate vector to hold diagonal */
      PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
    }
    PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
    PetscCall(VecGetLocalSize(ksp->diagonal, &n));
    PetscCall(VecGetArray(ksp->diagonal, &xx));
    /* build 1/sqrt(|d_i|) for symmetric diagonal scaling; substitute 1 where the diagonal is zero */
    for (i = 0; i < n; i++) {
      if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
      else {
        xx[i]    = 1.0;
        zeroflag = PETSC_TRUE;
      }
    }
    PetscCall(VecRestoreArray(ksp->diagonal, &xx));
    if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
    PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
    if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
    ksp->dscalefix2 = PETSC_FALSE;
  }
  PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
  PetscCall(PCSetUp(ksp->pc));
  PetscCall(PCGetFailedReason(ksp->pc, &pcreason));
  /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; their is no code to handle a ksp->reason on only some ranks */
  if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;

  /* optionally test the attached null space against the operator */
  PetscCall(MatGetNullSpace(mat, &nullsp));
  if (nullsp) {
    PetscBool test = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
    if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
  }
  ksp->setupstage = KSP_SETUP_NEWRHS; /* subsequent calls may skip the expensive setup */
  level--;
  PetscFunctionReturn(PETSC_SUCCESS);
}
444: /*@
445: KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged, `KSPConvergedReason` to a `PetscViewer`
447: Collective
449: Input Parameters:
450: + ksp - iterative solver obtained from `KSPCreate()`
451: - viewer - the `PetscViewer` on which to display the reason
453: Options Database Keys:
454: + -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
455: - -ksp_converged_reason ::failed - only print reason and number of iterations when diverged
457: Level: beginner
459: Note:
460: Use `KSPConvergedReasonViewFromOptions()` to display the reason based on values in the PETSc options database.
462: To change the format of the output call `PetscViewerPushFormat`(`viewer`,`format`) before this call. Use `PETSC_VIEWER_DEFAULT` for the default,
463: use `PETSC_VIEWER_FAILED` to only display a reason if it fails.
465: .seealso: [](ch_ksp), `KSPConvergedReasonViewFromOptions()`, `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
466: `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
467: @*/
468: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
469: {
470: PetscBool isAscii;
471: PetscViewerFormat format;
473: PetscFunctionBegin;
474: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
475: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
476: if (isAscii) {
477: PetscCall(PetscViewerGetFormat(viewer, &format));
478: PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel + 1));
479: if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
480: if (((PetscObject)ksp)->prefix) {
481: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
482: } else {
483: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
484: }
485: } else if (ksp->reason <= 0) {
486: if (((PetscObject)ksp)->prefix) {
487: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
488: } else {
489: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
490: }
491: if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
492: PCFailedReason reason;
493: PetscCall(PCGetFailedReason(ksp->pc, &reason));
494: PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s\n", PCFailedReasons[reason]));
495: }
496: }
497: PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel + 1));
498: }
499: PetscFunctionReturn(PETSC_SUCCESS);
500: }
502: /*@C
503: KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be used at the
504: end of the linear solver to display the convergence reason of the linear solver.
506: Logically Collective
508: Input Parameters:
509: + ksp - the `KSP` context
510: . f - the `ksp` converged reason view function, see `KSPConvergedReasonViewFn`
511: . ctx - [optional] context for private data for the `KSPConvergedReason` view routine (use `NULL` if context is not needed)
512: - reasonviewdestroy - [optional] routine that frees `ctx` (may be `NULL`), see `PetscCtxDestroyFn` for the calling sequence
514: Options Database Keys:
515: + -ksp_converged_reason - sets a default `KSPConvergedReasonView()`
516: - -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have been hardwired into a code by
517: calls to `KSPConvergedReasonViewSet()`, but does not cancel those set via the options database.
519: Level: intermediate
521: Note:
522: Several different converged reason view routines may be set by calling
523: `KSPConvergedReasonViewSet()` multiple times; all will be called in the
524: order in which they were set.
526: Developer Note:
527: Should be named KSPConvergedReasonViewAdd().
529: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewFn`, `KSPConvergedReasonViewCancel()`, `PetscCtxDestroyFn`
530: @*/
PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, KSPConvergedReasonViewFn *f, PetscCtx ctx, PetscCtxDestroyFn *reasonviewdestroy)
{
  PetscFunctionBegin;
  /* avoid registering an identical (function, context, destroy) triple twice */
  for (PetscInt i = 0; i < ksp->numberreasonviews; i++) {
    PetscBool identical;

    PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)f, ctx, reasonviewdestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
    if (identical) PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* fixed-size table of viewers; error out rather than silently dropping one */
  PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
  ksp->reasonview[ksp->numberreasonviews]          = f;
  ksp->reasonviewdestroy[ksp->numberreasonviews]   = reasonviewdestroy;
  ksp->reasonviewcontext[ksp->numberreasonviews++] = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}
548: /*@
549: KSPConvergedReasonViewCancel - Clears all the `KSPConvergedReason` view functions for a `KSP` object set with `KSPConvergedReasonViewSet()`
550: as well as the default viewer.
552: Collective
554: Input Parameter:
555: . ksp - iterative solver obtained from `KSPCreate()`
557: Level: intermediate
559: .seealso: [](ch_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`, `KSPConvergedReasonViewSet()`
560: @*/
561: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
562: {
563: PetscInt i;
565: PetscFunctionBegin;
567: for (i = 0; i < ksp->numberreasonviews; i++) {
568: if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
569: }
570: ksp->numberreasonviews = 0;
571: PetscCall(PetscViewerDestroy(&ksp->convergedreasonviewer));
572: PetscFunctionReturn(PETSC_SUCCESS);
573: }
575: /*@
576: KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how a `KSPConvergedReason` is to be viewed.
578: Collective
580: Input Parameter:
581: . ksp - the `KSP` object
583: Level: intermediate
585: Notes:
586: This function has a different API and behavior than `PetscObjectViewFromOptions()`
588: This is called automatically at the conclusion of `KSPSolve()` so is rarely called directly by user code.
590: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewSet()`
591: @*/
PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
{
  PetscFunctionBegin;
  /* Call all user-provided reason review routines */
  for (PetscInt i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));

  /* Call the default PETSc routine */
  if (ksp->convergedreasonviewer) {
    /* honor the format the user selected in the options database */
    PetscCall(PetscViewerPushFormat(ksp->convergedreasonviewer, ksp->convergedreasonformat));
    PetscCall(KSPConvergedReasonView(ksp, ksp->convergedreasonviewer));
    PetscCall(PetscViewerPopFormat(ksp->convergedreasonviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
607: /*@
608: KSPConvergedRateView - Displays the convergence rate <https://en.wikipedia.org/wiki/Coefficient_of_determination> of `KSPSolve()` to a viewer
610: Collective
612: Input Parameters:
613: + ksp - iterative solver obtained from `KSPCreate()`
614: - viewer - the `PetscViewer` to display the reason
616: Options Database Key:
617: . -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)
619: Level: intermediate
621: Notes:
622: To change the format of the output, call `PetscViewerPushFormat`(`viewer`,`format`) before this call.
624: Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
625: the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)}$,
627: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
628: @*/
PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscBool         isAscii;
  PetscReal         rrate, rRsq, erate = 0.0, eRsq = 0.0;
  PetscInt          its;
  const char       *prefix, *reason = KSPConvergedReasons[ksp->reason];

  PetscFunctionBegin;
  PetscCall(KSPGetIterationNumber(ksp, &its));
  /* rrate/rRsq: residual convergence rate and R^2; erate/eRsq: error rate and R^2 (if available) */
  PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
  if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
  if (isAscii) {
    PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
    if (ksp->reason > 0) {
      /* converged: reason line followed by rates on the same output line (tabs disabled) */
      if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      /* a negative R^2 marks "not computed"; only print valid fits */
      if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
      if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else if (ksp->reason <= 0) {
      /* diverged (or still iterating): same layout, different message */
      if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
      if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
        PCFailedReason reason;
        PetscCall(PCGetFailedReason(ksp->pc, &reason));
        PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s\n", PCFailedReasons[reason]));
      }
    }
    PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
673: #include <petscdraw.h>
/* Compute (explicitly or iteratively) and display the eigenvalues of the preconditioned operator
   on an ASCII or draw viewer. `isExplicit` selects the dense explicit computation over the
   Krylov-based estimate. */
static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
{
  PetscReal  *r, *c; /* real and imaginary parts of the eigenvalues */
  PetscInt    n, i, neig;
  PetscBool   isascii, isdraw;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isExplicit) {
    /* explicit path computes all eigenvalues of the (global-size) operator */
    PetscCall(VecGetSize(ksp->vec_sol, &n));
    PetscCall(PetscMalloc2(n, &r, n, &c));
    PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
    neig = n;
  } else {
    PetscInt nits;

    PetscCall(KSPGetIterationNumber(ksp, &nits));
    n = nits + 2; /* a little slack over the iteration count */
    if (!nits) {
      /* no Krylov space was built, so there is nothing to estimate from */
      PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscMalloc2(n, &r, n, &c));
    PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
  }
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
    for (i = 0; i < neig; ++i) {
      /* print a + bi, folding the sign of the imaginary part into the operator */
      if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
    }
  } else if (isdraw && rank == 0) { /* drawing happens only on rank 0 */
    PetscDraw   draw;
    PetscDrawSP drawsp;

    if (format == PETSC_VIEWER_DRAW_CONTOUR) {
      PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
    } else {
      /* scatter plot of the eigenvalues in the complex plane */
      PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
      PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
      PetscCall(PetscDrawSPReset(drawsp));
      for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
      PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
      PetscCall(PetscDrawSPSave(drawsp));
      PetscCall(PetscDrawSPDestroy(&drawsp));
    }
  }
  PetscCall(PetscFree2(r, c));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Display the extreme singular values estimated during the solve on an ASCII viewer.
   `format` is currently unused but kept for signature parity with the sibling viewers. */
static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
{
  PetscReal smax, smin;
  PetscInt  nits;
  PetscBool isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(KSPGetIterationNumber(ksp, &nits));
  if (!nits) {
    /* no Krylov space was built, so there is nothing to estimate from */
    PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
  /* a negative smin means the implementation returned eigenvalue (not singular value) estimates */
  if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme %svalues: max %g min %g max/min %g\n", smin < 0 ? "eigen" : "singular ", (double)smax, (double)smin, (double)(smax / smin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
747: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
748: {
749: PetscBool isascii;
751: PetscFunctionBegin;
752: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
753: PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final scale with -ksp_diagonal_scale except also with -ksp_diagonal_scale_fix");
754: if (isascii) {
755: Mat A;
756: Vec t;
757: PetscReal norm;
759: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
760: PetscCall(VecDuplicate(ksp->vec_rhs, &t));
761: PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
762: PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
763: PetscCall(PetscOptionsPushCreateViewerOff(PETSC_FALSE));
764: PetscCall(VecViewFromOptions(t, (PetscObject)ksp, "-ksp_view_final_residual_vec"));
765: PetscCall(PetscOptionsPopCreateViewerOff());
766: PetscCall(VecNorm(t, NORM_2, &norm));
767: PetscCall(VecDestroy(&t));
768: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
769: }
770: PetscFunctionReturn(PETSC_SUCCESS);
771: }
773: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode PetscMonitorPauseFinal_Internal(PetscInt n, PetscCtx ctx[])
774: {
775: PetscFunctionBegin;
776: for (PetscInt i = 0; i < n; ++i) {
777: PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ctx[i];
778: PetscDraw draw;
779: PetscReal lpause;
780: PetscBool isdraw;
782: if (!vf) continue;
783: if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
784: if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
785: PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
786: if (!isdraw) continue;
788: PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
789: PetscCall(PetscDrawGetPause(draw, &lpause));
790: PetscCall(PetscDrawSetPause(draw, -1.0));
791: PetscCall(PetscDrawPause(draw));
792: PetscCall(PetscDrawSetPause(draw, lpause));
793: }
794: PetscFunctionReturn(PETSC_SUCCESS);
795: }
797: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
798: {
799: PetscFunctionBegin;
800: if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
801: PetscCall(PetscMonitorPauseFinal_Internal(ksp->numbermonitors, ksp->monitorcontext));
802: PetscFunctionReturn(PETSC_SUCCESS);
803: }
/*
  KSPSolve_Private - Common implementation behind KSPSolve() and KSPSolveTranspose().

  Handles: aliased x == b, reference-counted installation of the rhs/solution
  vectors, pre/post-solve hooks, diagonal scaling and its undo, initial-guess
  generation (KSPGuess and the Knoll trick), removal of the appropriate null
  space from the right-hand side, the actual type-specific solve, and all the
  -ksp_view_* diagnostics afterwards.

  The static counter `level` tracks solve nesting so that KSP_DIVERGED_ITS in
  an inner (preconditioner) solve is not treated as a fatal error.
*/
static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
{
  PetscBool    flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
  Mat          mat, pmat;
  MPI_Comm     comm;
  MatNullSpace nullsp;
  Vec          btmp, vec_rhs = NULL;

  PetscFunctionBegin;
  level++;
  comm = PetscObjectComm((PetscObject)ksp);
  if (x && x == b) {
    /* solving in place: work in a duplicate and copy back at the end */
    PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
    PetscCall(VecDuplicate(b, &x));
    inXisinB = PETSC_TRUE;
  }
  /* install b and x on the KSP, taking references so callers may drop theirs */
  if (b) {
    PetscCall(PetscObjectReference((PetscObject)b));
    PetscCall(VecDestroy(&ksp->vec_rhs));
    ksp->vec_rhs = b;
  }
  if (x) {
    PetscCall(PetscObjectReference((PetscObject)x));
    PetscCall(VecDestroy(&ksp->vec_sol));
    ksp->vec_sol = x;
  }

  if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));

  /* user pre-solve hook, if registered */
  if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));

  /* reset the residual history list if requested */
  if (ksp->res_hist_reset) ksp->res_hist_len = 0;
  if (ksp->err_hist_reset) ksp->err_hist_len = 0;

  /* KSPSetUp() scales the matrix if needed */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));

  if (ksp->guess) {
    PetscObjectState ostate, state;

    /* let the KSPGuess object propose an initial guess; detect whether it
       actually wrote to vec_sol by comparing object states */
    PetscCall(KSPGuessSetUp(ksp->guess));
    PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
    PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
    PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
    if (state != ostate) {
      ksp->guess_zero = PETSC_FALSE;
    } else {
      PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
      ksp->guess_zero = PETSC_TRUE;
    }
  }

  PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));

  PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  /* diagonal scale RHS if called for */
  if (ksp->dscale) {
    PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
    /* second time in, but matrix was scaled back to original */
    if (ksp->dscalefix && ksp->dscalefix2) {
      Mat mat, pmat;

      PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
      PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
      if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
    }

    /* scale initial guess */
    if (!ksp->guess_zero) {
      if (!ksp->truediagonal) {
        /* truediagonal caches 1/diagonal for scaling the guess */
        PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
        PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
        PetscCall(VecReciprocal(ksp->truediagonal));
      }
      PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
    }
  }
  PetscCall(PCPreSolve(ksp->pc, ksp));

  if (ksp->guess_zero && !ksp->guess_not_read) PetscCall(VecSet(ksp->vec_sol, 0.0));
  if (ksp->guess_knoll) { /* The Knoll trick is independent on the KSPGuess specified */
    PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
    PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
    ksp->guess_zero = PETSC_FALSE;
  }

  /* can we mark the initial guess as zero for this solve? */
  guess_zero = ksp->guess_zero;
  if (!ksp->guess_zero) {
    PetscReal norm;

    PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
    if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
  }
  /* remove from the rhs the component that makes the system inconsistent:
     for A x = b that is the nullspace of A^T; for A^T x = b, the nullspace of A */
  if (ksp->transpose_solve) {
    PetscCall(MatGetNullSpace(mat, &nullsp));
  } else {
    PetscCall(MatGetTransposeNullSpace(mat, &nullsp));
  }
  if (nullsp) {
    /* solve with a projected copy of b; the user's rhs is restored afterwards */
    PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
    PetscCall(VecCopy(ksp->vec_rhs, btmp));
    PetscCall(MatNullSpaceRemove(nullsp, btmp));
    vec_rhs      = ksp->vec_rhs;
    ksp->vec_rhs = btmp;
  }
  PetscCall(VecLockReadPush(ksp->vec_rhs));
  PetscUseTypeMethod(ksp, solve);
  PetscCall(KSPMonitorPauseFinal_Internal(ksp));

  PetscCall(VecLockReadPop(ksp->vec_rhs));
  if (nullsp) {
    ksp->vec_rhs = vec_rhs;
    PetscCall(VecDestroy(&btmp));
  }

  ksp->guess_zero = guess_zero;

  PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
  ksp->totalits += ksp->its;

  PetscCall(KSPConvergedReasonViewFromOptions(ksp));

  if (ksp->viewRate) {
    PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
    PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
    PetscCall(PetscViewerPopFormat(ksp->viewerRate));
  }
  PetscCall(PCPostSolve(ksp->pc, ksp));

  /* diagonal scale solution if called for */
  if (ksp->dscale) {
    PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
    /* unscale right-hand side and matrix */
    if (ksp->dscalefix) {
      Mat mat, pmat;

      PetscCall(VecReciprocal(ksp->diagonal));
      PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
      PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
      PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
      if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
      PetscCall(VecReciprocal(ksp->diagonal));
      ksp->dscalefix2 = PETSC_TRUE;
    }
  }
  PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
  /* user post-solve hook, if registered */
  if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));

  /* -ksp_view_* diagnostics requested from the options database */
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
  if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
  if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
  if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
  if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
  if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
  if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
  if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
  if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
  if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
  if (ksp->viewMatExp) {
    Mat A, B;

    /* view the (possibly matrix-free) operator as an explicitly assembled matrix */
    PetscCall(PCGetOperators(ksp->pc, &A, NULL));
    if (ksp->transpose_solve) {
      Mat AT;

      PetscCall(MatCreateTranspose(A, &AT));
      PetscCall(MatComputeOperator(AT, MATAIJ, &B));
      PetscCall(MatDestroy(&AT));
    } else {
      PetscCall(MatComputeOperator(A, MATAIJ, &B));
    }
    PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
    PetscCall(MatDestroy(&B));
  }
  if (ksp->viewPOpExp) {
    Mat B;

    PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
    PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
    PetscCall(MatDestroy(&B));
  }

  if (inXisinB) {
    PetscCall(VecCopy(x, b));
    PetscCall(VecDestroy(&x));
  }
  PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
  /* in nested solves (level > 1), running out of iterations is not fatal;
     every other divergence reason is when -ksp_error_if_not_converged is set.
     NOTE(review): error returns here leave `level` incremented — presumably
     acceptable since these paths abort the computation; confirm if errors are caught. */
  if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
    PCFailedReason reason;

    PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
    PetscCall(PCGetFailedReason(ksp->pc, &reason));
    SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
  }
  level--;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1009: /*@
1010: KSPSolve - Solves a linear system associated with `KSP` object
1012: Collective
1014: Input Parameters:
1015: + ksp - iterative solver obtained from `KSPCreate()`
1016: . b - the right-hand side vector
1017: - x - the solution (this may be the same vector as `b`, then `b` will be overwritten with the answer)
1019: Options Database Keys:
1020: + -ksp_view_eigenvalues - compute preconditioned operators eigenvalues
1021: . -ksp_view_eigenvalues_explicit - compute the eigenvalues by forming the dense operator and using LAPACK
1022: . -ksp_view_mat binary - save matrix to the default binary viewer
1023: . -ksp_view_pmat binary - save matrix used to build preconditioner to the default binary viewer
1024: . -ksp_view_rhs binary - save right-hand side vector to the default binary viewer
1025: . -ksp_view_solution binary - save computed solution vector to the default binary viewer
1026: (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1027: . -ksp_view_mat_explicit - for matrix-free operators, computes the matrix entries and views them
1028: . -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1029: . -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
1030: . -ksp_view_final_residual - print 2-norm of true linear system residual at the end of the solution process
1031: . -ksp_view_final_residual_vec - print true linear system residual vector at the end of the solution process;
               `-ksp_view_final_residual` must be called first to enable this option
1033: . -ksp_error_if_not_converged - stop the program as soon as an error is detected in a `KSPSolve()`
1034: . -ksp_view_pre - print the ksp data structure before the system solution
1035: - -ksp_view - print the ksp data structure at the end of the system solution
1037: Level: beginner
1039: Notes:
1040: See `KSPSetFromOptions()` for additional options database keys that affect `KSPSolve()`
1042: If one uses `KSPSetDM()` then `x` or `b` need not be passed. Use `KSPGetSolution()` to access the solution in this case.
1044: The operator is specified with `KSPSetOperators()`.
1046: `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or if constructing the preconditioner failed.
1047: Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or function `KSPSetErrorIfNotConverged()`
1048: will cause `KSPSolve()` to error as soon as an error occurs in the linear solver. In inner `KSPSolve()` `KSP_DIVERGED_ITS` is not treated as an error because when using nested solvers
1049: it may be fine that inner solvers in the preconditioner do not converge during the solution process.
1051: The number of iterations can be obtained from `KSPGetIterationNumber()`.
1053: If you provide a matrix that has a `MatSetNullSpace()` and `MatSetTransposeNullSpace()` this will use that information to solve singular systems
1054: in the least squares sense with a norm minimizing solution.
1056: $A x = b $ where $b = b_p + b_t$ where $b_t$ is not in the range of $A$ (and hence by the fundamental theorem of linear algebra is in the nullspace(A'), see `MatSetNullSpace()`).
1058: `KSP` first removes $b_t$ producing the linear system $A x = b_p$ (which has multiple solutions) and solves this to find the $\|x\|$ minimizing solution (and hence
1059: it finds the solution $x$ orthogonal to the nullspace(A). The algorithm is simply in each iteration of the Krylov method we remove the nullspace(A) from the search
1060: direction thus the solution which is a linear combination of the search directions has no component in the nullspace(A).
1062: We recommend always using `KSPGMRES` for such singular systems.
1063: If $ nullspace(A) = nullspace(A^T)$ (note symmetric matrices always satisfy this property) then both left and right preconditioning will work
1064: If $nullspace(A) \neq nullspace(A^T)$ then left preconditioning will work but right preconditioning may not work (or it may).
1066: Developer Notes:
1067: The reason we cannot always solve $nullspace(A) \neq nullspace(A^T)$ systems with right preconditioning is because we need to remove at each iteration
1068: $ nullspace(AB) $ from the search direction. While we know the $nullspace(A)$, $nullspace(AB)$ equals $B^{-1}$ times $nullspace(A)$ but except for trivial preconditioners
1069: such as diagonal scaling we cannot apply the inverse of the preconditioner to a vector and thus cannot compute $nullspace(AB)$.
   If using a direct method (e.g., via the `KSP` solver
   `KSPPREONLY` and a preconditioner such as `PCLU` or `PCCHOLESKY`) then usually one iteration of the `KSP` method will be needed for convergence.
1074: To solve a linear system with the transpose of the matrix use `KSPSolveTranspose()`.
1076: Understanding Convergence\:
1077: The manual pages `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1078: `KSPComputeEigenvaluesExplicitly()` provide information on additional
1079: options to monitor convergence and print eigenvalue information.
1081: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1082: `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1083: `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1084: @*/
1085: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1086: {
1087: PetscBool isPCMPI;
1089: PetscFunctionBegin;
1093: ksp->transpose_solve = PETSC_FALSE;
1094: PetscCall(KSPSolve_Private(ksp, b, x));
1095: PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
1096: if (PCMPIServerActive && isPCMPI) {
1097: KSP subksp;
1099: PetscCall(PCMPIGetKSP(ksp->pc, &subksp));
1100: ksp->its = subksp->its;
1101: ksp->reason = subksp->reason;
1102: }
1103: PetscFunctionReturn(PETSC_SUCCESS);
1104: }
1106: static PetscErrorCode KSPUseExplicitTranspose_Private(KSP ksp)
1107: {
1108: Mat J, Jpre;
1110: PetscFunctionBegin;
1111: PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1112: if (!ksp->transpose.reuse_transpose) {
1113: PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1114: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1115: ksp->transpose.reuse_transpose = PETSC_TRUE;
1116: } else {
1117: PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1118: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1119: }
1120: if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1121: PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1122: ksp->transpose.BT = ksp->transpose.AT;
1123: }
1124: PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1125: PetscFunctionReturn(PETSC_SUCCESS);
1126: }
1128: /*@
1129: KSPSolveTranspose - Solves a linear system with the transpose of the matrix associated with the `KSP` object, $A^T x = b$.
1131: Collective
1133: Input Parameters:
1134: + ksp - iterative solver obtained from `KSPCreate()`
1135: . b - right-hand side vector
1136: - x - solution vector
1138: Level: developer
1140: Note:
   For complex numbers, this solves the non-Hermitian transpose system.
1143: Developer Note:
1144: We need to implement a `KSPSolveHermitianTranspose()`
1146: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1147: `KSPSolve()`, `KSP`, `KSPSetOperators()`
1148: @*/
1149: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1150: {
1151: PetscFunctionBegin;
1155: if (ksp->transpose.use_explicittranspose) {
1156: Mat J, Jpre;
1157: PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1158: if (!ksp->transpose.reuse_transpose) {
1159: PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1160: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1161: ksp->transpose.reuse_transpose = PETSC_TRUE;
1162: } else {
1163: PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1164: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1165: }
1166: if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1167: PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1168: ksp->transpose.BT = ksp->transpose.AT;
1169: }
1170: PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1171: } else {
1172: ksp->transpose_solve = PETSC_TRUE;
1173: }
1174: PetscCall(KSPSolve_Private(ksp, b, x));
1175: PetscFunctionReturn(PETSC_SUCCESS);
1176: }
1178: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1179: {
1180: Mat A, R;
1181: PetscReal *norms;
1182: PetscInt i, N;
1183: PetscBool flg;
1185: PetscFunctionBegin;
1186: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1187: if (flg) {
1188: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1189: if (!ksp->transpose_solve) PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1190: else PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1191: PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1192: PetscCall(MatGetSize(R, NULL, &N));
1193: PetscCall(PetscMalloc1(N, &norms));
1194: PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1195: PetscCall(MatDestroy(&R));
1196: for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : " ", shift + i, (double)norms[i]));
1197: PetscCall(PetscFree(norms));
1198: }
1199: PetscFunctionReturn(PETSC_SUCCESS);
1200: }
/*
  KSPMatSolve_Private - Common implementation behind KSPMatSolve() and
  KSPMatSolveTranspose(): solves for a block of right-hand sides stored as a
  dense matrix.

  If the KSP implementation provides a native matsolve method, the columns are
  solved in batches of at most the configured batch size (KSPSetMatSolveBatchSize());
  otherwise each column is solved individually through KSPSolve_Private().
*/
static PetscErrorCode KSPMatSolve_Private(KSP ksp, Mat B, Mat X)
{
  Mat       A, P, vB, vX;
  Vec       cb, cx;
  PetscInt  n1, N1, n2, N2, Bbn = PETSC_DECIDE;
  PetscBool match;

  PetscFunctionBegin;
  PetscCheckSameComm(ksp, 1, B, 2);
  PetscCheckSameComm(ksp, 1, X, 3);
  PetscCheckSameType(B, 2, X, 3);
  PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  MatCheckPreallocated(X, 3);
  if (!X->assembled) {
    PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
  }
  PetscCheck(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
  PetscCall(KSPGetOperators(ksp, &A, &P));
  /* B and X must agree in (local and global) number of columns */
  PetscCall(MatGetLocalSize(B, NULL, &n2));
  PetscCall(MatGetLocalSize(X, NULL, &n1));
  PetscCall(MatGetSize(B, NULL, &N2));
  PetscCall(MatGetSize(X, NULL, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));
  if (ksp->ops->matsolve) {
    /* native block solve; `level` tracks solve nesting as in KSPSolve_Private() */
    level++;
    if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
    PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
    PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
    /* by default, do a single solve with all columns */
    if (Bbn == PETSC_DECIDE) Bbn = N2;
    else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
    PetscCall(PetscInfo(ksp, "KSP type %s%s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, ksp->transpose_solve ? " transpose" : "", Bbn));
    /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
    if (Bbn >= N2) {
      PetscUseTypeMethod(ksp, matsolve, B, X);
      if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));

      PetscCall(KSPConvergedReasonViewFromOptions(ksp));

      if (ksp->viewRate) {
        PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
        PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
        PetscCall(PetscViewerPopFormat(ksp->viewerRate));
      }
    } else {
      /* solve Bbn columns at a time through dense submatrix views (no copies) */
      for (n2 = 0; n2 < N2; n2 += Bbn) {
        PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
        PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
        PetscUseTypeMethod(ksp, matsolve, vB, vX);
        if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));

        PetscCall(KSPConvergedReasonViewFromOptions(ksp));

        if (ksp->viewRate) {
          PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
          PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
          PetscCall(PetscViewerPopFormat(ksp->viewerRate));
        }
        PetscCall(MatDenseRestoreSubMatrix(B, &vB));
        PetscCall(MatDenseRestoreSubMatrix(X, &vX));
      }
    }
    if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
    if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
    if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
    if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
    if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
    PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
    /* KSP_DIVERGED_ITS in a nested solve (level > 1) is tolerated; see KSPSolve() notes */
    if (ksp->errorifnotconverged && ksp->reason < 0 && (level == 1 || ksp->reason != KSP_DIVERGED_ITS)) {
      PCFailedReason reason;

      PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
      PetscCall(PCGetFailedReason(ksp->pc, &reason));
      SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
    }
    level--;
  } else {
    /* fallback: no native block solve — loop over individual columns */
    PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
    for (n2 = 0; n2 < N2; ++n2) {
      PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
      PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
      PetscCall(KSPSolve_Private(ksp, cb, cx));
      PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
      PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1302: /*@
1303: KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a `MATDENSE`.
1305: Input Parameters:
1306: + ksp - iterative solver
1307: - B - block of right-hand sides
1309: Output Parameter:
1310: . X - block of solutions
1312: Level: intermediate
1314: Notes:
1315: This is a stripped-down version of `KSPSolve()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.
1317: Unlike with `KSPSolve()`, `B` and `X` must be different matrices.
1319: .seealso: [](ch_ksp), `KSPSolve()`, `MatMatSolve()`, `KSPMatSolveTranspose()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`, `KSPSetMatSolveBatchSize()`
1320: @*/
PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
{
  PetscFunctionBegin;
  /* plain (non-transpose) block solve; all work happens in the shared private routine */
  ksp->transpose_solve = PETSC_FALSE;
  PetscCall(KSPMatSolve_Private(ksp, B, X));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1329: /*@
1330: KSPMatSolveTranspose - Solves a linear system with the transposed matrix with multiple right-hand sides stored as a `MATDENSE`.
1332: Input Parameters:
1333: + ksp - iterative solver
1334: - B - block of right-hand sides
1336: Output Parameter:
1337: . X - block of solutions
1339: Level: intermediate
1341: Notes:
1342: This is a stripped-down version of `KSPSolveTranspose()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.
1344: Unlike `KSPSolveTranspose()`,
1345: `B` and `X` must be different matrices and the transposed matrix cannot be assembled explicitly for the user.
1347: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `MatMatTransposeSolve()`, `KSPMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1348: @*/
PetscErrorCode KSPMatSolveTranspose(KSP ksp, Mat B, Mat X)
{
  PetscFunctionBegin;
  /* either install explicitly assembled transposes of the operators, or ask
     the implementation to apply the transpose implicitly during the solve */
  if (ksp->transpose.use_explicittranspose) PetscCall(KSPUseExplicitTranspose_Private(ksp));
  else ksp->transpose_solve = PETSC_TRUE;
  PetscCall(KSPMatSolve_Private(ksp, B, X));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1358: /*@
1359: KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.
1361: Logically Collective
1363: Input Parameters:
1364: + ksp - the `KSP` iterative solver
1365: - bs - batch size
1367: Level: advanced
1369: Note:
1370: Using a larger block size can improve the efficiency of the solver.
1372: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matproduct_batch_size`
1373: @*/
PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
{
  PetscFunctionBegin;
  /* stored unvalidated; KSPMatSolve_Private() checks bs >= 1 (PETSC_DECIDE means all columns at once) */
  ksp->nmax = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1383: /*@
1384: KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.
1386: Input Parameter:
1387: . ksp - iterative solver context
1389: Output Parameter:
1390: . bs - batch size
1392: Level: advanced
1394: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matproduct_batch_size`
1395: @*/
PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
{
  PetscFunctionBegin;
  PetscAssertPointer(bs, 2);
  /* PETSC_DECIDE until KSPSetMatSolveBatchSize() is called */
  *bs = ksp->nmax;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1405: /*@
1406: KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`
1408: Collective
1410: Input Parameter:
1411: . ksp - the `KSP` iterative solver context obtained from `KSPCreate()`
1413: Level: beginner
1415: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1416: @*/
1417: PetscErrorCode KSPResetViewers(KSP ksp)
1418: {
1419: PetscFunctionBegin;
1421: if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1422: PetscCall(PetscViewerDestroy(&ksp->viewer));
1423: PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1424: PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1425: PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1426: PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1427: PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1428: PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1429: PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1430: PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1431: PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1432: PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1433: PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1434: PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1435: PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1436: ksp->view = PETSC_FALSE;
1437: ksp->viewPre = PETSC_FALSE;
1438: ksp->viewMat = PETSC_FALSE;
1439: ksp->viewPMat = PETSC_FALSE;
1440: ksp->viewRhs = PETSC_FALSE;
1441: ksp->viewSol = PETSC_FALSE;
1442: ksp->viewMatExp = PETSC_FALSE;
1443: ksp->viewEV = PETSC_FALSE;
1444: ksp->viewSV = PETSC_FALSE;
1445: ksp->viewEVExp = PETSC_FALSE;
1446: ksp->viewFinalRes = PETSC_FALSE;
1447: ksp->viewPOpExp = PETSC_FALSE;
1448: ksp->viewDScale = PETSC_FALSE;
1449: PetscFunctionReturn(PETSC_SUCCESS);
1450: }
/*@
  KSPReset - Removes any allocated `Vec` and `Mat` from the `KSP` data structures.

  Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Level: intermediate

  Notes:
  Any options set in the `KSP`, including those set with `KSPSetFromOptions()` remain.

  Call `KSPReset()` only before you call `KSPSetOperators()` with a different sized matrix than the previous matrix used with the `KSP`.

.seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
@*/
PetscErrorCode KSPReset(KSP ksp)
{
  PetscFunctionBegin;
  if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
  /* let the implementation release its private data first, then the inner PC and guess objects */
  PetscTryTypeMethod(ksp, reset);
  if (ksp->pc) PetscCall(PCReset(ksp->pc));
  if (ksp->guess) {
    KSPGuess guess = ksp->guess;
    PetscTryTypeMethod(guess, reset);
  }
  /* free the work vectors and any cached problem vectors */
  PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
  PetscCall(VecDestroy(&ksp->vec_rhs));
  PetscCall(VecDestroy(&ksp->vec_sol));
  PetscCall(VecDestroy(&ksp->diagonal));
  PetscCall(VecDestroy(&ksp->truediagonal));
  /* force a complete setup on the next KSPSetUp() */
  ksp->setupstage = KSP_SETUP_NEW;
  ksp->nmax       = PETSC_DECIDE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPDestroy - Destroys a `KSP` context.

  Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Level: beginner

.seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
@*/
PetscErrorCode KSPDestroy(KSP *ksp)
{
  PC pc;

  PetscFunctionBegin;
  if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
  /* another holder still references this KSP; just drop our reference */
  if (--((PetscObject)*ksp)->refct > 0) {
    *ksp = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));

  /*
   Avoid a cascading call to PCReset(ksp->pc) from the following call:
   PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
   refcount (and may be shared, e.g., by other ksps).
   */
  pc         = (*ksp)->pc;
  (*ksp)->pc = NULL;
  PetscCall(KSPReset(*ksp));
  PetscCall(KSPResetViewers(*ksp));
  (*ksp)->pc = pc;
  PetscTryTypeMethod(*ksp, destroy);

  /* explicit-transpose mode owns the cached transposed operators */
  if ((*ksp)->transpose.use_explicittranspose) {
    PetscCall(MatDestroy(&(*ksp)->transpose.AT));
    PetscCall(MatDestroy(&(*ksp)->transpose.BT));
    (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
  }

  PetscCall(KSPGuessDestroy(&(*ksp)->guess));
  PetscCall(DMDestroy(&(*ksp)->dm));
  PetscCall(PCDestroy(&(*ksp)->pc));
  PetscCall(PetscFree((*ksp)->res_hist_alloc));
  PetscCall(PetscFree((*ksp)->err_hist_alloc));
  if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)(&(*ksp)->cnvP));
  PetscCall(KSPMonitorCancel(*ksp));
  PetscCall(KSPConvergedReasonViewCancel(*ksp));
  PetscCall(PetscHeaderDestroy(ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetPCSide - Sets the preconditioning side.

  Logically Collective

  Input Parameters:
+ ksp  - iterative solver obtained from `KSPCreate()`
- side - the preconditioning side, where side is one of
.vb
  PC_LEFT      - left preconditioning (default)
  PC_RIGHT     - right preconditioning
  PC_SYMMETRIC - symmetric preconditioning
.ve

  Options Database Key:
. -ksp_pc_side (right|left|symmetric) - `KSP` preconditioner side

  Level: intermediate

  Notes:
  Left preconditioning is used by default for most Krylov methods except `KSPFGMRES` which only supports right preconditioning.

  For methods changing the side of the preconditioner changes the norm type that is used, see `KSPSetNormType()`.

  Symmetric preconditioning is currently available only for the `KSPQCG` method. However, note that
  symmetric preconditioning can be emulated by using either right or left
  preconditioning, modifying the application of the matrix (with a custom `Mat` argument to `KSPSetOperators()`,
  and using a pre `KSPSetPreSolve()` or post processing `KSPSetPostSolve()` step).

  Setting the `PCSide` often affects the default norm type. See `KSPSetNormType()` for details.

.seealso: [](ch_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`, `KSPSetPreSolve()`, `KSPSetPostSolve()`
@*/
PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
{
  PetscFunctionBegin;
  /* record the side both as the current value and as the user-requested value */
  ksp->pc_side = ksp->pc_side_set = side;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetPCSide - Gets the preconditioning side.

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. side - the preconditioning side, where side is one of
.vb
  PC_LEFT      - left preconditioning (default)
  PC_RIGHT     - right preconditioning
  PC_SYMMETRIC - symmetric preconditioning
.ve

  Level: intermediate

.seealso: [](ch_ksp), `KSPSetPCSide()`, `KSP`
@*/
PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
{
  PetscFunctionBegin;
  PetscAssertPointer(side, 2);
  /* resolve any still-defaulted norm type and PC side before reporting */
  PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
  *side = ksp->pc_side;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1621: /*@
1622: KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1623: iteration tolerances used by the default `KSP` convergence tests.
1625: Not Collective
1627: Input Parameter:
1628: . ksp - the Krylov subspace context
1630: Output Parameters:
1631: + rtol - the relative convergence tolerance
1632: . abstol - the absolute convergence tolerance
1633: . dtol - the divergence tolerance
1634: - maxits - maximum number of iterations
1636: Level: intermediate
1638: Note:
1639: The user can specify `NULL` for any parameter that is not needed.
1641: .seealso: [](ch_ksp), `KSPSetTolerances()`, `KSP`, `KSPSetMinimumIterations()`, `KSPGetMinimumIterations()`
1642: @*/
1643: PetscErrorCode KSPGetTolerances(KSP ksp, PeOp PetscReal *rtol, PeOp PetscReal *abstol, PeOp PetscReal *dtol, PeOp PetscInt *maxits)
1644: {
1645: PetscFunctionBegin;
1647: if (abstol) *abstol = ksp->abstol;
1648: if (rtol) *rtol = ksp->rtol;
1649: if (dtol) *dtol = ksp->divtol;
1650: if (maxits) *maxits = ksp->max_it;
1651: PetscFunctionReturn(PETSC_SUCCESS);
1652: }
1654: /*@
1655: KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1656: iteration tolerances used by the default `KSP` convergence testers.
1658: Logically Collective
1660: Input Parameters:
1661: + ksp - the Krylov subspace context
1662: . rtol - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1663: . abstol - the absolute convergence tolerance absolute size of the (possibly preconditioned) residual norm
1664: . dtol - the divergence tolerance, amount (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1665: - maxits - maximum number of iterations to use
1667: Options Database Keys:
1668: + -ksp_atol abstol - Sets `abstol`
1669: . -ksp_rtol rtol - Sets `rtol`
1670: . -ksp_divtol dtol - Sets `dtol`
1671: - -ksp_max_it maxits - Sets `maxits`
1673: Level: intermediate
1675: Notes:
1676: The tolerances are with respect to a norm of the residual of the equation $ \| b - A x^n \|$, they do not directly use the error of the equation.
1677: The norm used depends on the `KSPNormType` that has been set with `KSPSetNormType()`, the default depends on the `KSPType` used.
1679: All parameters must be non-negative.
1681: Use `PETSC_CURRENT` to retain the current value of any of the parameters. The deprecated `PETSC_DEFAULT` also retains the current value (though the name is confusing).
1683: Use `PETSC_DETERMINE` to use the default value for the given `KSP`. The default value is the value when the object's type is set.
1685: For `dtol` and `maxits` use `PETSC_UNLIMITED` to indicate there is no upper bound on these values
1687: See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1688: for setting user-defined stopping criteria.
1690: Fortran Note:
1691: Use `PETSC_CURRENT_INTEGER`, `PETSC_CURRENT_REAL`, `PETSC_DETERMINE_INTEGER`, or `PETSC_DETERMINE_REAL`
1693: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetMinimumIterations()`
1694: @*/
1695: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1696: {
1697: PetscFunctionBegin;
1704: if (rtol == (PetscReal)PETSC_DETERMINE) {
1705: ksp->rtol = ksp->default_rtol;
1706: } else if (rtol != (PetscReal)PETSC_CURRENT) {
1707: PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1708: ksp->rtol = rtol;
1709: }
1710: if (abstol == (PetscReal)PETSC_DETERMINE) {
1711: ksp->abstol = ksp->default_abstol;
1712: } else if (abstol != (PetscReal)PETSC_CURRENT) {
1713: PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1714: ksp->abstol = abstol;
1715: }
1716: if (dtol == (PetscReal)PETSC_DETERMINE) {
1717: ksp->divtol = ksp->default_divtol;
1718: } else if (dtol == (PetscReal)PETSC_UNLIMITED) {
1719: ksp->divtol = PETSC_MAX_REAL;
1720: } else if (dtol != (PetscReal)PETSC_CURRENT) {
1721: PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be larger than 1.0", (double)dtol);
1722: ksp->divtol = dtol;
1723: }
1724: if (maxits == PETSC_DETERMINE) {
1725: ksp->max_it = ksp->default_max_it;
1726: } else if (maxits == PETSC_UNLIMITED) {
1727: ksp->max_it = PETSC_INT_MAX;
1728: } else if (maxits != PETSC_CURRENT) {
1729: PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1730: ksp->max_it = maxits;
1731: }
1732: PetscFunctionReturn(PETSC_SUCCESS);
1733: }
/*@
  KSPSetMinimumIterations - Sets the minimum number of iterations to use, regardless of the tolerances

  Logically Collective

  Input Parameters:
+ ksp   - the Krylov subspace context
- minit - minimum number of iterations to use

  Options Database Key:
. -ksp_min_it minit - Sets `minit`

  Level: intermediate

  Notes:
  Use `KSPSetTolerances()` to set a variety of other tolerances

  See `KSPConvergedDefault()` for details on how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
  for setting user-defined stopping criteria.

  If the initial residual norm is small enough solvers may return immediately without computing any improvement to the solution. Using this routine
  prevents that which usually ensures the solution is changed (often minimally) from the previous solution. This option may be used with ODE integrators
  to ensure the integrator does not fall into a false steady-state solution of the ODE.

.seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPGetMinimumIterations()`
@*/
PetscErrorCode KSPSetMinimumIterations(KSP ksp, PetscInt minit)
{
  PetscFunctionBegin;
  PetscCheck(minit >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Minimum number of iterations %" PetscInt_FMT " must be non-negative", minit);
  ksp->min_it = minit;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetMinimumIterations - Gets the minimum number of iterations to use, regardless of the tolerances, that was set with `KSPSetMinimumIterations()` or `-ksp_min_it`

  Not Collective

  Input Parameter:
. ksp - the Krylov subspace context

  Output Parameter:
. minit - minimum number of iterations to use

  Level: intermediate

.seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPSetMinimumIterations()`
@*/
PetscErrorCode KSPGetMinimumIterations(KSP ksp, PetscInt *minit)
{
  PetscFunctionBegin;
  PetscAssertPointer(minit, 2);
  *minit = ksp->min_it;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetInitialGuessNonzero - Tells the iterative solver that the
  initial guess is nonzero; otherwise `KSP` assumes the initial guess
  is to be zero (and thus zeros it out before solving).

  Logically Collective

  Input Parameters:
+ ksp - iterative solver obtained from `KSPCreate()`
- flg - `PETSC_TRUE` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero

  Options Database Key:
. -ksp_initial_guess_nonzero (true|false) - use nonzero initial guess

  Level: beginner

.seealso: [](ch_ksp), `KSPGetInitialGuessNonzero()`, `KSPGuessSetType()`, `KSPGuessType`, `KSP`
@*/
PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  /* internally the flag is stored with the opposite sense ("guess is zero") */
  ksp->guess_zero = (PetscBool)!flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1824: /*@
1825: KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1826: a zero initial guess.
1828: Not Collective
1830: Input Parameter:
1831: . ksp - iterative solver obtained from `KSPCreate()`
1833: Output Parameter:
1834: . flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`
1836: Level: intermediate
1838: .seealso: [](ch_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1839: @*/
1840: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1841: {
1842: PetscFunctionBegin;
1844: PetscAssertPointer(flag, 2);
1845: if (ksp->guess_zero) *flag = PETSC_FALSE;
1846: else *flag = PETSC_TRUE;
1847: PetscFunctionReturn(PETSC_SUCCESS);
1848: }
/*@
  KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error if the solver has not converged as soon as the error is detected.

  Logically Collective

  Input Parameters:
+ ksp - iterative solver obtained from `KSPCreate()`
- flg - `PETSC_TRUE` indicates you want the error generated

  Options Database Key:
. -ksp_error_if_not_converged (true|false) - generate an error and stop the program

  Level: intermediate

  Notes:
  Normally PETSc continues if a linear solver fails to converge, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
  to determine if it has converged. This functionality is mostly helpful while running in a debugger (`-start_in_debugger`) to determine exactly where
  the failure occurs and why.

  A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver

.seealso: [](ch_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
@*/
PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
{
  PC pc;

  PetscFunctionBegin;
  ksp->errorifnotconverged = flg;
  /* propagate the same policy to the preconditioner (creating it if necessary) */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetErrorIfFailure(pc, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`

  Level: intermediate

.seealso: [](ch_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
@*/
PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscAssertPointer(flag, 2);
  *flag = ksp->errorifnotconverged;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` on the right hand side vector to compute the initial guess (The Knoll trick)

  Logically Collective

  Input Parameters:
+ ksp - iterative solver obtained from `KSPCreate()`
- flg - `PETSC_TRUE` or `PETSC_FALSE`

  Level: advanced

  Developer Note:
  The Knoll trick is not currently implemented using the `KSPGuess` class which provides a variety of ways of computing
  an initial guess based on previous solves.

.seealso: [](ch_ksp), `KSPGetInitialGuessKnoll()`, `KSPGuess`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
@*/
PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  ksp->guess_knoll = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using PCApply(pc,b,...) to compute
  the initial guess)

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`

  Level: advanced

.seealso: [](ch_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
@*/
PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscAssertPointer(flag, 2);
  *flag = ksp->guess_knoll;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
  values will be calculated via a Lanczos or Arnoldi process as the linear
  system is solved.

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. flg - `PETSC_TRUE` or `PETSC_FALSE`

  Options Database Key:
. -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

  Level: advanced

  Notes:
  This option is not valid for all iterative methods.

  Many users may just want to use the monitoring routine
  `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
  to print the singular values at each iteration of the linear solve.

.seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
@*/
PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 2);
  *flg = ksp->calc_sings;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetComputeSingularValues - Sets a flag so that the extreme singular
  values will be calculated via a Lanczos or Arnoldi process as the linear
  system is solved.

  Logically Collective

  Input Parameters:
+ ksp - iterative solver obtained from `KSPCreate()`
- flg - `PETSC_TRUE` or `PETSC_FALSE`

  Options Database Key:
. -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

  Level: advanced

  Notes:
  This option is not valid for all iterative methods.

  Many users may just want to use the monitoring routine
  `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
  to print the singular values at each iteration of the linear solve.

  Consider using the excellent package SLEPc for accurate efficient computations of singular or eigenvalues.

.seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`, `KSPSetComputeRitz()`
@*/
PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  ksp->calc_sings = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetComputeEigenvalues - Gets the flag indicating that the extreme eigenvalues
  will be calculated via a Lanczos or Arnoldi process as the linear
  system is solved.

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. flg - `PETSC_TRUE` or `PETSC_FALSE`

  Level: advanced

  Note:
  Currently this option is not valid for all iterative methods.

.seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
@*/
PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 2);
  /* eigenvalue and singular-value computation share the calc_sings flag */
  *flg = ksp->calc_sings;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
  will be calculated via a Lanczos or Arnoldi process as the linear
  system is solved.

  Logically Collective

  Input Parameters:
+ ksp - iterative solver obtained from `KSPCreate()`
- flg - `PETSC_TRUE` or `PETSC_FALSE`

  Level: advanced

  Note:
  Currently this option is not valid for all iterative methods.

  Consider using the excellent package SLEPc for accurate efficient computations of singular or eigenvalues.

.seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
@*/
PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  /* eigenvalue and singular-value computation share the calc_sings flag */
  ksp->calc_sings = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
  will be calculated via a Lanczos or Arnoldi process as the linear
  system is solved.

  Logically Collective

  Input Parameters:
+ ksp - iterative solver obtained from `KSPCreate()`
- flg - `PETSC_TRUE` or `PETSC_FALSE`

  Level: advanced

  Note:
  Currently this option is only valid for the `KSPGMRES` method.

.seealso: [](ch_ksp), `KSPComputeRitz()`, `KSP`, `KSPComputeEigenvalues()`, `KSPComputeExtremeSingularValues()`
@*/
PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  ksp->calc_ritz = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetRhs - Gets the right-hand-side vector for the linear system to
  be solved.

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. r - right-hand-side vector

  Level: developer

.seealso: [](ch_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
@*/
PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
{
  PetscFunctionBegin;
  PetscAssertPointer(r, 2);
  /* returns a borrowed reference; the caller must not destroy it */
  *r = ksp->vec_rhs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPGetSolution - Gets the location of the solution for the
  linear system to be solved.

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. v - solution vector

  Level: developer

  Note:
  If this is called during a `KSPSolve()` the vector's values may not represent the solution
  to the linear system.

.seealso: [](ch_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
@*/
PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
{
  PetscFunctionBegin;
  PetscAssertPointer(v, 2);
  /* returns a borrowed reference; the caller must not destroy it */
  *v = ksp->vec_sol;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  KSPSetPC - Sets the preconditioner to be used to calculate the
  application of the preconditioner on a vector into a `KSP`.

  Collective

  Input Parameters:
+ ksp - the `KSP` iterative solver obtained from `KSPCreate()`
- pc  - the preconditioner object (may be `NULL`, in which case the current `PC` is released and a new one will be created on demand by `KSPGetPC()`)

  Level: developer

  Note:
  This routine is almost never used since `KSP` creates its own `PC` when needed.
  Use `KSPGetPC()` to retrieve the preconditioner context instead of creating a new one.

.seealso: [](ch_ksp), `KSPGetPC()`, `KSP`
@*/
PetscErrorCode KSPSetPC(KSP ksp, PC pc)
{
  PetscFunctionBegin;
  if (pc) {
    PetscCheckSameComm(ksp, 1, pc, 2);
  }
  /* a different PC after setup means the operators must be re-examined, not a full re-setup */
  if (ksp->pc != pc && ksp->setupstage) ksp->setupstage = KSP_SETUP_NEWMATRIX;
  /* reference before destroy so pc == ksp->pc is handled safely */
  PetscCall(PetscObjectReference((PetscObject)pc));
  PetscCall(PCDestroy(&ksp->pc));
  ksp->pc = pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2205: PETSC_INTERN PetscErrorCode PCCreate_MPI(PC);
2207: // PetscClangLinter pragma disable: -fdoc-internal-linkage
2208: /*@C
2209: KSPCheckPCMPI - Checks if `-mpi_linear_solver_server` is active and the `PC` should be changed to `PCMPI`
2211: Collective, No Fortran Support
2213: Input Parameter:
2214: . ksp - iterative solver obtained from `KSPCreate()`
2216: Level: developer
2218: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PCMPIServerBegin()`, `PCMPIServerEnd()`
2219: @*/
2220: PETSC_INTERN PetscErrorCode KSPCheckPCMPI(KSP ksp)
2221: {
2222: PetscBool isPCMPI;
2224: PetscFunctionBegin;
2226: PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
2227: if (PCMPIServerActive && ksp->nestlevel == 0 && !isPCMPI) {
2228: const char *prefix;
2229: char *found = NULL;
2231: PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
2232: if (prefix) PetscCall(PetscStrstr(prefix, "mpi_linear_solver_server_", &found));
2233: if (!found) PetscCall(KSPAppendOptionsPrefix(ksp, "mpi_linear_solver_server_"));
2234: PetscCall(PetscInfo(NULL, "In MPI Linear Solver Server and detected (root) PC that must be changed to PCMPI\n"));
2235: PetscCall(PCSetType(ksp->pc, PCMPI));
2236: }
2237: PetscFunctionReturn(PETSC_SUCCESS);
2238: }
/*@
  KSPGetPC - Returns a pointer to the preconditioner context with the `KSP`

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameter:
. pc - preconditioner context

  Level: beginner

  Note:
  The `PC` is created if it does not already exist.

  Developer Note:
  Calls `KSPCheckPCMPI()` to check if the `KSP` is effected by `-mpi_linear_solver_server`

.seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PC`
@*/
PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
{
  PetscFunctionBegin;
  PetscAssertPointer(pc, 2);
  /* lazily create the PC the first time it is requested, inheriting the KSP's options and settings */
  if (!ksp->pc) {
    PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
    PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
    PetscCall(PCSetKSPNestLevel(ksp->pc, ksp->nestlevel));
    PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
    if (ksp->dm) PetscCall(PCSetDM(ksp->pc, ksp->dm));
  }
  PetscCall(KSPCheckPCMPI(ksp));
  *pc = ksp->pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2279: /*@
2280: KSPMonitor - runs the user provided monitor routines, if they exist
2282: Collective
2284: Input Parameters:
2285: + ksp - iterative solver obtained from `KSPCreate()`
2286: . it - iteration number
2287: - rnorm - relative norm of the residual
2289: Level: developer
2291: Notes:
2292: This routine is called by the `KSP` implementations.
2293: It does not typically need to be called by the user.
2295: For Krylov methods that do not keep a running value of the current solution (such as `KSPGMRES`) this
2296: cannot be called after the `KSPConvergedReason` has been set but before the final solution has been computed.
2298: .seealso: [](ch_ksp), `KSPMonitorSet()`
2299: @*/
2300: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2301: {
2302: PetscInt i, n = ksp->numbermonitors;
2304: PetscFunctionBegin;
2305: for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2306: PetscFunctionReturn(PETSC_SUCCESS);
2307: }
2309: /*@C
2310: KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor, i.e. display in some way, perhaps by printing in the terminal,
2311: the residual norm computed in a `KSPSolve()`
2313: Logically Collective
2315: Input Parameters:
2316: + ksp - iterative solver obtained from `KSPCreate()`
2317: . monitor - pointer to function (if this is `NULL`, it turns off monitoring, see `KSPMonitorFn`
2318: . ctx - [optional] context for private data for the monitor routine (use `NULL` if no context is needed)
2319: - monitordestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for the calling sequence
2321: Options Database Keys:
2322: + -ksp_monitor - sets `KSPMonitorResidual()`
2323: . -ksp_monitor hdf5:filename - sets `KSPMonitorResidualView()` and saves residual
2324: . -ksp_monitor draw - sets `KSPMonitorResidualView()` and plots residual
2325: . -ksp_monitor draw::draw_lg - sets `KSPMonitorResidualDrawLG()` and plots residual
2326: . -ksp_monitor_pause_final - Pauses any graphics when the solve finishes (only works for internal monitors)
2327: . -ksp_monitor_true_residual - sets `KSPMonitorTrueResidual()`
2328: . -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2329: . -ksp_monitor_max - sets `KSPMonitorTrueResidualMax()`
2330: . -ksp_monitor_singular_value - sets `KSPMonitorSingularValue()`
2331: - -ksp_monitor_cancel - cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but
2332: does not cancel those set via the options database.
2334: Level: beginner
2336: Notes:
2337: The options database option `-ksp_monitor` and related options are the easiest way to turn on `KSP` iteration monitoring
2339: `KSPMonitorRegister()` provides a way to associate an options database key with `KSP` monitor function.
2341: The default is to do no monitoring. To print the residual, or preconditioned
2342: residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2343: `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2344: context.
2346: Several different monitoring routines may be set by calling
2347: `KSPMonitorSet()` multiple times; they will be called in the
2348: order in which they were set.
2350: Fortran Note:
2351: Only a single monitor function can be set for each `KSP` object
2353: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorRegister()`, `KSPMonitorCancel()`, `KSP`, `PetscCtxDestroyFn`
2354: @*/
2355: PetscErrorCode KSPMonitorSet(KSP ksp, KSPMonitorFn *monitor, PetscCtx ctx, PetscCtxDestroyFn *monitordestroy)
2356: {
2357: PetscFunctionBegin;
2359: for (PetscInt i = 0; i < ksp->numbermonitors; i++) {
2360: PetscBool identical;
2362: PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)monitor, ctx, monitordestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
2363: if (identical) PetscFunctionReturn(PETSC_SUCCESS);
2364: }
2365: PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
2366: ksp->monitor[ksp->numbermonitors] = monitor;
2367: ksp->monitordestroy[ksp->numbermonitors] = monitordestroy;
2368: ksp->monitorcontext[ksp->numbermonitors++] = ctx;
2369: PetscFunctionReturn(PETSC_SUCCESS);
2370: }
2372: /*@
2373: KSPMonitorCancel - Clears all monitors for a `KSP` object.
2375: Logically Collective
2377: Input Parameter:
2378: . ksp - iterative solver obtained from `KSPCreate()`
2380: Options Database Key:
2381: . -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.
2383: Level: intermediate
2385: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2386: @*/
2387: PetscErrorCode KSPMonitorCancel(KSP ksp)
2388: {
2389: PetscInt i;
2391: PetscFunctionBegin;
2393: for (i = 0; i < ksp->numbermonitors; i++) {
2394: if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i])); /* release each context via its registered destroy routine, if one was given */
2395: }
2396: ksp->numbermonitors = 0; /* only the count is reset; stale array slots are simply never read again */
2397: PetscFunctionReturn(PETSC_SUCCESS);
2398: }
2400: /*@C
2401: KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.
2403: Not Collective
2405: Input Parameter:
2406: . ksp - iterative solver obtained from `KSPCreate()`
2408: Output Parameter:
2409: . ctx - monitoring context
2411: Level: intermediate
2413: Fortran Notes:
2414: This only works when the context is a Fortran derived type or a `PetscObject`. Declare `ctx` with
2415: .vb
2416: type(tUsertype), pointer :: ctx
2417: .ve
2419: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSP`
2420: @*/
2421: PetscErrorCode KSPGetMonitorContext(KSP ksp, PetscCtxRt ctx)
2422: {
2423: PetscFunctionBegin;
2425: *(void **)ctx = ksp->monitorcontext[0]; /* only the context of the FIRST registered monitor is returned */
2426: PetscFunctionReturn(PETSC_SUCCESS);
2427: }
2429: /*@
2430: KSPSetResidualHistory - Sets the array used to hold the residual history.
2431: If set, this array will contain the residual norms computed at each
2432: iteration of the solver.
2434: Not Collective
2436: Input Parameters:
2437: + ksp - iterative solver obtained from `KSPCreate()`
2438: . a - array to hold history
2439: . na - size of `a`
2440: - reset - `PETSC_TRUE` indicates the history counter is reset to zero
2441: for each new linear solve
2443: Level: advanced
2445: Notes:
2446: If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2447: If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a
2448: default array of length 10,000 is allocated.
2450: If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history
2452: .seealso: [](ch_ksp), `KSPGetResidualHistory()`, `KSP`
2453: @*/
2454: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
2455: {
2456: PetscFunctionBegin;
2459: PetscCall(PetscFree(ksp->res_hist_alloc)); /* drop any previously self-allocated array; a user-provided array is never freed here */
2460: if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2461: ksp->res_hist = a; /* use the caller's storage; caller retains ownership */
2462: ksp->res_hist_max = na;
2463: } else {
2464: if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2465: else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2466: PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc)); /* internally owned, zero-initialized; freed by the next call here (destroy path not visible in this chunk) */
2468: ksp->res_hist = ksp->res_hist_alloc;
2469: }
2470: ksp->res_hist_len = 0; /* recording restarts from the beginning */
2471: ksp->res_hist_reset = reset;
2472: PetscFunctionReturn(PETSC_SUCCESS);
2473: }
2475: /*@C
2476: KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.
2478: Not Collective
2480: Input Parameter:
2481: . ksp - iterative solver obtained from `KSPCreate()`
2483: Output Parameters:
2484: + a - pointer to array to hold history (or `NULL`)
2485: - na - number of used entries in a (or `NULL`). Note this has different meanings depending on the `reset` argument to `KSPSetResidualHistory()`
2487: Level: advanced
2489: Note:
2490: This array is borrowed and should not be freed by the caller.
2492: Can only be called after a `KSPSetResidualHistory()` otherwise `a` and `na` are set to `NULL` and zero
2494: When `reset` was `PETSC_TRUE` since a residual is computed before the first iteration, the value of `na` is generally one more than the value
2495: returned with `KSPGetIterationNumber()`.
2497: Some Krylov methods may not compute the final residual norm when convergence is declared because the maximum number of iterations allowed has been reached.
2498: In this situation, when `reset` was `PETSC_TRUE`, `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`
2500: Some Krylov methods (such as `KSPSTCG`), under certain circumstances, do not compute the final residual norm. In this situation, when `reset` was `PETSC_TRUE`,
2501: `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`
2503: `KSPBCGSL` does not record the residual norms for the "subiterations" hence the results from `KSPGetResidualHistory()` and `KSPGetIterationNumber()` will be different
2505: Fortran Note:
2506: Call `KSPRestoreResidualHistory()` when access to the history is no longer needed.
2508: .seealso: [](ch_ksp), `KSPSetResidualHistory()`, `KSP`, `KSPGetIterationNumber()`, `KSPSTCG`, `KSPBCGSL`
2509: @*/
2510: PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2511: {
2512: PetscFunctionBegin;
2514: if (a) *a = ksp->res_hist; /* borrowed reference; caller must not free */
2515: if (na) PetscCall(PetscIntCast(ksp->res_hist_len, na)); /* checked cast of the stored length to PetscInt */
2516: PetscFunctionReturn(PETSC_SUCCESS);
2517: }
2519: /*@
2520: KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.
2522: Not Collective
2524: Input Parameters:
2525: + ksp - iterative solver obtained from `KSPCreate()`
2526: . a - array to hold history
2527: . na - size of `a`
2528: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve
2530: Level: advanced
2532: Notes:
2533: If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2534: If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a default array of length 10,000 is allocated.
2536: If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history
2538: .seealso: [](ch_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2539: @*/
2540: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
2541: {
2542: PetscFunctionBegin;
2545: PetscCall(PetscFree(ksp->err_hist_alloc)); /* drop any previously self-allocated array; a user-provided array is never freed here */
2546: if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2547: ksp->err_hist = a; /* use the caller's storage; caller retains ownership */
2548: ksp->err_hist_max = na;
2549: } else {
2550: if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2551: else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2552: PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc)); /* internally owned, zero-initialized; mirrors KSPSetResidualHistory() */
2553: ksp->err_hist = ksp->err_hist_alloc;
2554: }
2555: ksp->err_hist_len = 0; /* recording restarts from the beginning */
2556: ksp->err_hist_reset = reset;
2557: PetscFunctionReturn(PETSC_SUCCESS);
2558: }
2560: /*@C
2561: KSPGetErrorHistory - Gets the array used to hold the error history and the number of residuals it contains.
2563: Not Collective
2565: Input Parameter:
2566: . ksp - iterative solver obtained from `KSPCreate()`
2568: Output Parameters:
2569: + a - pointer to array to hold history (or `NULL`)
2570: - na - number of used entries in a (or `NULL`)
2572: Level: advanced
2574: Note:
2575: This array is borrowed and should not be freed by the caller.
2576: Can only be called after a `KSPSetErrorHistory()` otherwise `a` and `na` are set to `NULL` and zero
2578: Fortran Note:
2579: .vb
2580: PetscReal, pointer :: a(:)
2581: .ve
2583: .seealso: [](ch_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2584: @*/
2585: PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2586: {
2587: PetscFunctionBegin;
2589: if (a) *a = ksp->err_hist; /* borrowed reference; caller must not free */
2590: if (na) PetscCall(PetscIntCast(ksp->err_hist_len, na)); /* checked cast of the stored length to PetscInt */
2591: PetscFunctionReturn(PETSC_SUCCESS);
2592: }
2594: /*@
2595: KSPComputeConvergenceRate - Compute the convergence rate for the iteration <https://en.wikipedia.org/wiki/Coefficient_of_determination>
2597: Not Collective
2599: Input Parameter:
2600: . ksp - The `KSP`
2602: Output Parameters:
2603: + cr - The residual contraction rate
2604: . rRsq - The coefficient of determination, $R^2$, indicating the linearity of the data
2605: . ce - The error contraction rate
2606: - eRsq - The coefficient of determination, $R^2$, indicating the linearity of the data
2608: Level: advanced
2610: Note:
2611: Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $log r_k = log r_0 + k log c$. After linear regression,
2612: the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,
2614: .seealso: [](ch_ksp), `KSP`, `KSPConvergedRateView()`
2615: @*/
2616: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2617: {
2618: PetscReal const *hist;
2619: PetscReal *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2620: PetscInt n, k;
2622: PetscFunctionBegin;
2623: if (cr || rRsq) {
2624: PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2625: if (!n) {
2626: if (cr) *cr = 0.0;
2627: if (rRsq) *rRsq = -1.0;
2628: } else {
2629: PetscCall(PetscMalloc2(n, &x, n, &y));
2630: for (k = 0; k < n; ++k) {
2631: x[k] = k;
2632: y[k] = PetscLogReal(hist[k]);
2633: mean += y[k];
2634: }
2635: mean /= n;
2636: PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2637: for (k = 0; k < n; ++k) {
2638: res += PetscSqr(y[k] - (slope * x[k] + intercept));
2639: var += PetscSqr(y[k] - mean);
2640: }
2641: PetscCall(PetscFree2(x, y));
2642: if (cr) *cr = PetscExpReal(slope);
2643: if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2644: }
2645: }
2646: if (ce || eRsq) {
2647: PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2648: if (!n) {
2649: if (ce) *ce = 0.0;
2650: if (eRsq) *eRsq = -1.0;
2651: } else {
2652: PetscCall(PetscMalloc2(n, &x, n, &y));
2653: for (k = 0; k < n; ++k) {
2654: x[k] = k;
2655: y[k] = PetscLogReal(hist[k]);
2656: mean += y[k];
2657: }
2658: mean /= n;
2659: PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2660: for (k = 0; k < n; ++k) {
2661: res += PetscSqr(y[k] - (slope * x[k] + intercept));
2662: var += PetscSqr(y[k] - mean);
2663: }
2664: PetscCall(PetscFree2(x, y));
2665: if (ce) *ce = PetscExpReal(slope);
2666: if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2667: }
2668: }
2669: PetscFunctionReturn(PETSC_SUCCESS);
2670: }
2672: /*@C
2673: KSPSetConvergenceTest - Sets the function to be used to determine convergence of `KSPSolve()`
2675: Logically Collective
2677: Input Parameters:
2678: + ksp - iterative solver obtained from `KSPCreate()`
2679: . converge - pointer to the function, see `KSPConvergenceTestFn`
2680: . ctx - context for private data for the convergence routine (may be `NULL`)
2681: - destroy - a routine for destroying the context (may be `NULL`)
2683: Level: advanced
2685: Notes:
2686: Must be called after the `KSP` type has been set so put this after
2687: a call to `KSPSetType()`, or `KSPSetFromOptions()`.
2689: The default convergence test, `KSPConvergedDefault()`, aborts if the
2690: residual grows to more than 10000 times the initial residual.
2692: The default is a combination of relative and absolute tolerances.
2693: The residual value that is tested may be an approximation; routines
2694: that need exact values should compute them.
2696: In the default PETSc convergence test, the precise values of reason
2697: are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.
2699: .seealso: [](ch_ksp), `KSP`, `KSPConvergenceTestFn`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2700: @*/
2701: PetscErrorCode KSPSetConvergenceTest(KSP ksp, KSPConvergenceTestFn *converge, PetscCtx ctx, PetscCtxDestroyFn *destroy)
2702: {
2703: PetscFunctionBegin;
2705: if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(&ksp->cnvP)); /* release the previous test's context before overwriting it */
2706: ksp->converged = converge;
2707: ksp->convergeddestroy = destroy;
2708: ksp->cnvP = ctx;
2709: PetscFunctionReturn(PETSC_SUCCESS);
2710: }
2712: /*@C
2713: KSPGetConvergenceTest - Gets the function to be used to determine convergence.
2715: Logically Collective
2717: Input Parameter:
2718: . ksp - iterative solver obtained from `KSPCreate()`
2720: Output Parameters:
2721: + converge - pointer to convergence test function, see `KSPConvergenceTestFn`
2722: . ctx - context for private data for the convergence routine (may be `NULL`)
2723: - destroy - a routine for destroying the context (may be `NULL`)
2725: Level: advanced
2727: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2728: @*/
2729: PetscErrorCode KSPGetConvergenceTest(KSP ksp, KSPConvergenceTestFn **converge, PetscCtxRt ctx, PetscCtxDestroyFn **destroy)
2730: {
2731: PetscFunctionBegin;
2733: if (converge) *converge = ksp->converged; /* every output is optional and may be passed as NULL */
2734: if (destroy) *destroy = ksp->convergeddestroy;
2735: if (ctx) *(void **)ctx = ksp->cnvP;
2736: PetscFunctionReturn(PETSC_SUCCESS);
2737: }
2739: /*@C
2740: KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context
2742: Logically Collective
2744: Input Parameter:
2745: . ksp - iterative solver obtained from `KSPCreate()`
2747: Output Parameters:
2748: + converge - pointer to convergence test function, see `KSPConvergenceTestFn`
2749: . ctx - context for private data for the convergence routine
2750: - destroy - a routine for destroying the context
2752: Level: advanced
2754: Note:
2755: This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`)
2756: and then calling `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed
2757: by `KSPSetConvergenceTest()` the original context information
2758: would be destroyed and hence the transferred context would be invalid and trigger a crash on use
2760: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2761: @*/
2762: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, KSPConvergenceTestFn **converge, PetscCtxRt ctx, PetscCtxDestroyFn **destroy)
2763: {
2764: PetscFunctionBegin;
2766: *converge = ksp->converged; /* unlike KSPGetConvergenceTest(), all three outputs are required here (no NULL checks) */
2767: *destroy = ksp->convergeddestroy;
2768: *(void **)ctx = ksp->cnvP;
2769: ksp->converged = NULL; /* clear WITHOUT invoking the destroy routine: ownership of the context transfers to the caller */
2770: ksp->cnvP = NULL;
2771: ksp->convergeddestroy = NULL;
2772: PetscFunctionReturn(PETSC_SUCCESS);
2773: }
2775: /*@C
2776: KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.
2778: Not Collective
2780: Input Parameter:
2781: . ksp - iterative solver obtained from `KSPCreate()`
2783: Output Parameter:
2784: . ctx - monitoring context
2786: Level: advanced
2788: Fortran Note:
2789: This only works when the context is a Fortran derived type or a `PetscObject`. Declare `ctx` with
2790: .vb
2791: type(tUsertype), pointer :: ctx
2792: .ve
2794: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2795: @*/
2796: PetscErrorCode KSPGetConvergenceContext(KSP ksp, PetscCtxRt ctx)
2797: {
2798: PetscFunctionBegin;
2800: *(void **)ctx = ksp->cnvP; /* raw context pointer exactly as stored by KSPSetConvergenceTest() */
2801: PetscFunctionReturn(PETSC_SUCCESS);
2802: }
2804: /*@
2805: KSPBuildSolution - Builds the approximate solution in a vector provided.
2807: Collective
2809: Input Parameter:
2810: . ksp - iterative solver obtained from `KSPCreate()`
2812: Output Parameter:
2813: Provide exactly one of
2814: + v - location to stash solution, optional, otherwise pass `NULL`
2815: - V - the solution is returned in this location. This vector is created internally. This vector should NOT be destroyed by the user with `VecDestroy()`.
2817: Level: developer
2819: Notes:
2820: This routine can be used in one of two ways
2821: .vb
2822: KSPBuildSolution(ksp,NULL,&V);
2823: or
2824: KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2825: .ve
2826: In the first case an internal vector is allocated to store the solution
2827: (the user cannot destroy this vector). In the second case the solution
2828: is generated in the vector that the user provides. Note that for certain
2829: methods, such as `KSPCG`, the second case requires a copy of the solution,
2830: while in the first case the call is essentially free since it simply
2831: returns the vector where the solution already is stored. For some methods
2832: like `KSPGMRES` during the solve this is a reasonably expensive operation and should only be
2833: used if truly needed.
2835: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2836: @*/
2837: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2838: {
2839: PetscFunctionBegin;
2841: PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
2842: if (!V) V = &v; /* caller supplied only v: route the result through it */
2843: if (ksp->reason != KSP_CONVERGED_ITERATING) { /* solve already finished: the stored solution is final, so no assembly is needed */
2844: if (!v) PetscCall(KSPGetSolution(ksp, V));
2845: else PetscCall(VecCopy(ksp->vec_sol, v));
2846: } else {
2847: PetscUseTypeMethod(ksp, buildsolution, v, V); /* mid-solve: delegate to the KSP implementation, which may need to assemble the current iterate */
2848: }
2849: PetscFunctionReturn(PETSC_SUCCESS);
2850: }
2852: /*@
2853: KSPBuildResidual - Builds the residual in a vector provided.
2855: Collective
2857: Input Parameter:
2858: . ksp - iterative solver obtained from `KSPCreate()`
2860: Output Parameters:
2861: + t - work vector. If not provided then one is generated.
2862: . v - optional location to stash residual. If `v` is not provided, then a location is generated.
2863: - V - the residual
2865: Level: advanced
2867: Note:
2868: Regardless of whether or not `v` is provided, the residual is
2869: returned in `V`.
2871: .seealso: [](ch_ksp), `KSP`, `KSPBuildSolution()`
2872: @*/
2873: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2874: {
2875: PetscBool flag = PETSC_FALSE; /* tracks whether the work vector tt was created inside this routine */
2876: Vec w = v, tt = t;
2878: PetscFunctionBegin;
2880: if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w)); /* NOTE(review): when v is NULL this new vector is passed to the type method as the residual location; presumably *V ends up referencing it and the caller takes ownership -- confirm against the buildresidual implementations */
2881: if (!tt) {
2882: PetscCall(VecDuplicate(ksp->vec_sol, &tt));
2883: flag = PETSC_TRUE;
2884: }
2885: PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2886: if (flag) PetscCall(VecDestroy(&tt)); /* destroy the work vector only if it was created here */
2887: PetscFunctionReturn(PETSC_SUCCESS);
2888: }
2890: /*@
2891: KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2892: before solving. This actually CHANGES the matrix (and right-hand side).
2894: Logically Collective
2896: Input Parameters:
2897: + ksp - the `KSP` context
2898: - scale - `PETSC_TRUE` or `PETSC_FALSE`
2900: Options Database Keys:
2901: + -ksp_diagonal_scale - perform a diagonal scaling before the solve
2902: - -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve
2904: Level: advanced
2906: Notes:
2907: Scales the matrix by $D^{-1/2} A D^{-1/2} [D^{1/2} x ] = D^{-1/2} b $
2908: where $D_{ii}$ is $1/abs(A_{ii}) $ unless $A_{ii}$ is zero and then it is 1.
2910: BE CAREFUL with this routine: it actually scales the matrix and right
2911: hand side that define the system. After the system is solved the matrix
2912: and right-hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`
2914: This should NOT be used within the `SNES` solves if you are using a line
2915: search.
2917: If you use this with the `PCType` `PCEISENSTAT` preconditioner then you can
2918: use the `PCEisenstatSetNoDiagonalScaling()` option, or `-pc_eisenstat_no_diagonal_scaling`
2919: to save some unneeded, redundant flops.
2921: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2922: @*/
2923: PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
2924: {
2925: PetscFunctionBegin;
2928: ksp->dscale = scale; /* flag only; the actual scaling is applied elsewhere during the solve (not visible in this routine) */
2929: PetscFunctionReturn(PETSC_SUCCESS);
2930: }
2932: /*@
2933: KSPGetDiagonalScale - Checks if `KSP` solver scales the matrix and right-hand side, that is if `KSPSetDiagonalScale()` has been called
2935: Not Collective
2937: Input Parameter:
2938: . ksp - the `KSP` context
2940: Output Parameter:
2941: . scale - `PETSC_TRUE` or `PETSC_FALSE`
2943: Level: intermediate
2945: .seealso: [](ch_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`
2946: @*/
2947: PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
2948: {
2949: PetscFunctionBegin;
2951: PetscAssertPointer(scale, 2);
2952: *scale = ksp->dscale; /* query of the flag set by KSPSetDiagonalScale() */
2953: PetscFunctionReturn(PETSC_SUCCESS);
2954: }
2956: /*@
2957: KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.
2959: Logically Collective
2961: Input Parameters:
2962: + ksp - the `KSP` context
2963: - fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2964: rescale (default)
2966: Level: intermediate
2968: Notes:
2969: Must be called after `KSPSetDiagonalScale()`
2971: Using this will slow things down, because it rescales the matrix before and
2972: after each linear solve. This is intended mainly for testing to allow one
2973: to easily get back the original system to make sure the solution computed is
2974: accurate enough.
2976: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2977: @*/
2978: PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
2979: {
2980: PetscFunctionBegin;
2983: ksp->dscalefix = fix; /* flag only; the scaling back is performed elsewhere after the solve (not visible in this routine) */
2984: PetscFunctionReturn(PETSC_SUCCESS);
2985: }
2987: /*@
2988: KSPGetDiagonalScaleFix - Determines if `KSP` diagonally scales the system back after solving. That is `KSPSetDiagonalScaleFix()` has been called
2990: Not Collective
2992: Input Parameter:
2993: . ksp - the `KSP` context
2995: Output Parameter:
2996: . fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2997: rescale (default)
2999: Level: intermediate
3001: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
3002: @*/
3003: PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
3004: {
3005: PetscFunctionBegin;
3007: PetscAssertPointer(fix, 2);
3008: *fix = ksp->dscalefix; /* query of the flag set by KSPSetDiagonalScaleFix() */
3009: PetscFunctionReturn(PETSC_SUCCESS);
3010: }
3012: /*@C
3013: KSPSetComputeOperators - set routine to compute the linear operators
3015: Logically Collective
3017: Input Parameters:
3018: + ksp - the `KSP` context
3019: . func - function to compute the operators, see `KSPComputeOperatorsFn` for the calling sequence
3020: - ctx - optional context
3022: Level: beginner
3024: Notes:
3025: `func()` will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
3026: unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
3027: with different right-hand side functions but is a confusing API since one might expect it to be called for each `KSPSolve()`
3029: To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`
3031: Developer Note:
3032: Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are computing without requiring call this
3033: routine to indicate when the new matrix should be computed.
3035: .seealso: [](ch_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`, `KSPComputeOperatorsFn`
3036: @*/
3037: PetscErrorCode KSPSetComputeOperators(KSP ksp, KSPComputeOperatorsFn *func, PetscCtx ctx)
3038: {
3039: DM dm;
3041: PetscFunctionBegin;
3043: PetscCall(KSPGetDM(ksp, &dm)); /* the callback is stored on the KSP's DM, not on the KSP itself */
3044: PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
3045: if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX; /* mark the operators stale so the next KSPSolve() recomputes the matrix */
3046: PetscFunctionReturn(PETSC_SUCCESS);
3047: }
3049: /*@C
3050: KSPSetComputeRHS - set routine to compute the right-hand side of the linear system
3052: Logically Collective
3054: Input Parameters:
3055: + ksp - the `KSP` context
3056: . func - function to compute the right-hand side, see `KSPComputeRHSFn` for the calling sequence
3057: - ctx - optional context
3059: Level: beginner
3061: Note:
3062: The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right-hand side for that solve
3064: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`, `KSPComputeRHSFn`
3065: @*/
3066: PetscErrorCode KSPSetComputeRHS(KSP ksp, KSPComputeRHSFn *func, PetscCtx ctx)
3067: {
3068: DM dm;
3070: PetscFunctionBegin;
3072: PetscCall(KSPGetDM(ksp, &dm)); /* the callback is stored on the KSP's DM, not on the KSP itself */
3073: PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
3074: PetscFunctionReturn(PETSC_SUCCESS);
3075: }
3077: /*@C
3078: KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system
3080: Logically Collective
3082: Input Parameters:
3083: + ksp - the `KSP` context
3084: . func - function to compute the initial guess, see `KSPComputeInitialGuessFn` for calling sequence
3085: - ctx - optional context
3087: Level: beginner
3089: Note:
3090: This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
3091: call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver
3093: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`,
3094: `KSPComputeInitialGuessFn`
3095: @*/
3096: PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, KSPComputeInitialGuessFn *func, PetscCtx ctx)
3097: {
3098: DM dm;
3100: PetscFunctionBegin;
3102: PetscCall(KSPGetDM(ksp, &dm)); /* the callback is stored on the KSP's DM, not on the KSP itself */
3103: PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
3104: PetscFunctionReturn(PETSC_SUCCESS);
3105: }
3107: /*@
3108: KSPSetUseExplicitTranspose - Determines if the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
3109: be explicitly formed since the solve is much more efficient.
3111: Logically Collective
3113: Input Parameters:
3114: + ksp - the `KSP` context
3117: - flg - `PETSC_TRUE` to transpose the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not transpose (default)
3119: Level: advanced
3121: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `KSP`
3122: @*/
3123: PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
3124: {
3125: PetscFunctionBegin;
3128: ksp->transpose.use_explicittranspose = flg; /* flag presumably consulted by KSPSolveTranspose() -- solver not visible in this chunk */
3129: PetscFunctionReturn(PETSC_SUCCESS);
3130: }