Actual source code: fgmres.c
1: /*
2: This file implements FGMRES (the Flexible Generalized Minimal Residual method).
3: Reference: Saad (1993), "A flexible inner-outer preconditioned GMRES algorithm", SIAM J. Sci. Comput.
5: Preconditioning: If the preconditioner is constant then this fgmres
6: code is equivalent to RIGHT-PRECONDITIONED GMRES.
7: FGMRES is a modification of GMRES that allows the preconditioner to change
8: at each iteration.
10: Restarts: A restart is simply a solve whose initial guess x0 is the (nonzero) current iterate.
11: */
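/*
   A minimal usage sketch (not part of this implementation): FGMRES is selected like any
   other KSP type. The matrix A and vectors b, x below are placeholders assumed to exist;
   the routine names are standard PETSc calls.

     KSP ksp;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPSetType(ksp, KSPFGMRES));
     PetscCall(KSPGMRESSetRestart(ksp, 30));                                  // restart length (max_k)
     PetscCall(KSPFGMRESSetModifyPC(ksp, KSPFGMRESModifyPCKSP, NULL, NULL));  // optionally let the PC vary between iterations
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/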
13: #include <../src/ksp/ksp/impls/gmres/fgmres/fgmresimpl.h>
14: #define FGMRES_DELTA_DIRECTIONS 10
15: #define FGMRES_DEFAULT_MAXK 30
16: static PetscErrorCode KSPFGMRESGetNewVectors(KSP, PetscInt);
17: static PetscErrorCode KSPFGMRESUpdateHessenberg(KSP, PetscInt, PetscBool, PetscReal *);
18: static PetscErrorCode KSPFGMRESBuildSoln(PetscScalar *, Vec, Vec, KSP, PetscInt);
20: static PetscErrorCode KSPSetUp_FGMRES(KSP ksp)
21: {
22: PetscInt max_k, k;
23: KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
25: PetscFunctionBegin;
26: max_k = fgmres->max_k;
28: PetscCall(KSPSetUp_GMRES(ksp));
30: PetscCall(PetscMalloc1(max_k + 2, &fgmres->prevecs));
31: PetscCall(PetscMalloc1(max_k + 2, &fgmres->prevecs_user_work));
33: /* fgmres->vv_allocated includes extra work vectors, which are not used in the additional
34: block of vectors used to store the preconditioned directions, hence the -VEC_OFFSET
35: term for this first allocation of vectors holding preconditioned directions */
36: PetscCall(KSPCreateVecs(ksp, fgmres->vv_allocated - VEC_OFFSET, &fgmres->prevecs_user_work[0], 0, NULL));
37: for (k = 0; k < fgmres->vv_allocated - VEC_OFFSET; k++) fgmres->prevecs[k] = fgmres->prevecs_user_work[0][k];
38: PetscFunctionReturn(PETSC_SUCCESS);
39: }
41: static PetscErrorCode KSPFGMRESResidual(KSP ksp)
42: {
43: KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);
44: Mat Amat, Pmat;
46: PetscFunctionBegin;
47: PetscCall(PCGetOperators(ksp->pc, &Amat, &Pmat));
49: /* put A*x into VEC_TEMP */
50: PetscCall(KSP_MatMult(ksp, Amat, ksp->vec_sol, VEC_TEMP));
51: /* now put residual (-A*x + f) into vec_vv(0) */
52: PetscCall(VecWAXPY(VEC_VV(0), -1.0, VEC_TEMP, ksp->vec_rhs));
53: PetscFunctionReturn(PETSC_SUCCESS);
54: }
56: static PetscErrorCode KSPFGMRESCycle(PetscInt *itcount, KSP ksp)
57: {
58: KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);
59: PetscReal res_norm;
60: PetscReal hapbnd, tt;
61: PetscBool hapend = PETSC_FALSE; /* indicates happy breakdown ending */
62: PetscInt loc_it; /* local count of # of dir. in Krylov space */
63: PetscInt max_k = fgmres->max_k; /* max # of directions Krylov space */
64: Mat Amat, Pmat;
66: PetscFunctionBegin;
67: /* loc_it counts the number of directions (pseudo-iterations) generated
68: since the last restart */
69: loc_it = 0;
71: /* note: (fgmres->it) is always set one less than (loc_it). It is used in
72: KSPBuildSolution_FGMRES, where it is passed to KSPFGMRESBuildSoln.
73: Note that when KSPFGMRESBuildSoln is called from this function,
74: (loc_it - 1) is passed, so the two are equivalent */
75: fgmres->it = (loc_it - 1);
77: /* initial residual is in VEC_VV(0) - compute its norm*/
78: PetscCall(VecNorm(VEC_VV(0), NORM_2, &res_norm));
79: KSPCheckNorm(ksp, res_norm);
81: /* first entry in the right-hand side of the Hessenberg system is just
82: the initial residual norm */
83: *RS(0) = res_norm;
85: ksp->rnorm = res_norm;
86: PetscCall(KSPLogResidualHistory(ksp, res_norm));
87: PetscCall(KSPMonitor(ksp, ksp->its, res_norm));
89: /* check for convergence - maybe the current guess is good enough */
90: PetscCall((*ksp->converged)(ksp, ksp->its, res_norm, &ksp->reason, ksp->cnvP));
91: if (ksp->reason) {
92: if (itcount) *itcount = 0;
93: PetscFunctionReturn(PETSC_SUCCESS);
94: }
96: /* scale VEC_VV (the initial residual) */
97: PetscCall(VecScale(VEC_VV(0), 1.0 / res_norm));
99: /* MAIN ITERATION LOOP BEGINNING*/
100: /* keep iterating until we have converged OR generated the max number
101: of directions OR reached the max number of iterations for the method */
102: while (!ksp->reason && loc_it < max_k && ksp->its < ksp->max_it) {
103: if (loc_it) {
104: PetscCall(KSPLogResidualHistory(ksp, res_norm));
105: PetscCall(KSPMonitor(ksp, ksp->its, res_norm));
106: }
107: fgmres->it = (loc_it - 1);
109: /* see if more space is needed for work vectors */
110: if (fgmres->vv_allocated <= loc_it + VEC_OFFSET + 1) {
111: PetscCall(KSPFGMRESGetNewVectors(ksp, loc_it + 1));
112: /* (loc_it+1) is passed in as number of the first vector that should
113: be allocated */
114: }
116: /* CHANGE THE PRECONDITIONER? */
117: /* ModifyPC is the callback function that can be used to
118: change the PC or its attributes before it is applied */
119: PetscCall((*fgmres->modifypc)(ksp, ksp->its, loc_it, res_norm, fgmres->modifyctx));
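/* A user-supplied ModifyPC callback must match the FCN1 signature registered through
   KSPFGMRESSetModifyPC() (see the typedef near the bottom of this file); the sketch below,
   with the hypothetical name MyModifyPC, only illustrates the arguments that arrive at the
   call above:

     PetscErrorCode MyModifyPC(KSP ksp, PetscInt total_its, PetscInt loc_its, PetscReal res_norm, void *ctx)
     {
       PC pc;
       PetscFunctionBegin;
       PetscCall(KSPGetPC(ksp, &pc));
       // inspect res_norm or loc_its here and adjust pc (e.g. change options of an inner KSP)
       PetscFunctionReturn(PETSC_SUCCESS);
     }
*/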
121: /* apply the PRECONDITIONER to the direction vector and store the result
122: with the other preconditioned vectors in PREVEC */
123: PetscCall(KSP_PCApply(ksp, VEC_VV(loc_it), PREVEC(loc_it)));
125: PetscCall(PCGetOperators(ksp->pc, &Amat, &Pmat));
126: /* Multiply preconditioned vector by operator - put in VEC_VV(loc_it+1) */
127: PetscCall(KSP_MatMult(ksp, Amat, PREVEC(loc_it), VEC_VV(1 + loc_it)));
129: /* update the Hessenberg matrix and do Gram-Schmidt - the new direction is in
130: VEC_VV(1+loc_it) */
131: PetscCall((*fgmres->orthog)(ksp, loc_it));
133: /* new entry in the Hessenberg matrix is the 2-norm of our new direction */
134: PetscCall(VecNorm(VEC_VV(loc_it + 1), NORM_2, &tt));
135: KSPCheckNorm(ksp, tt);
137: *HH(loc_it + 1, loc_it) = tt;
138: *HES(loc_it + 1, loc_it) = tt;
140: /* Happy Breakdown Check */
141: hapbnd = PetscAbsScalar((tt) / *RS(loc_it));
142: /* RS(loc_it) contains the res_norm from the last iteration */
143: hapbnd = PetscMin(fgmres->haptol, hapbnd);
144: if (tt > hapbnd) {
145: /* scale new direction by its norm */
146: PetscCall(VecScale(VEC_VV(loc_it + 1), 1.0 / tt));
147: } else {
148: /* This happens when the solution is exactly reached. */
149: /* So there is no new direction... */
150: PetscCall(VecSet(VEC_TEMP, 0.0)); /* set VEC_TEMP to 0 */
151: hapend = PETSC_TRUE;
152: }
153: /* note that for FGMRES we could get HES(loc_it+1, loc_it) = 0 and the
154: current solution would not be exact if HES were singular. Note that
155: HH nonsingular implies that HES is not singular, and HES is guaranteed
156: to be nonsingular when the PREVECS are linearly independent and A is
157: nonsingular (in GMRES, the nonsingularity of A implies the nonsingularity
158: of HES). So we should really add a check to verify that HES is nonsingular. */
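/* For reference (Saad 1993): the loop builds the flexible Arnoldi relation
     A Z_m = V_{m+1} Hbar_m,
   where the columns of Z_m are the preconditioned directions PREVEC(0..m-1), the columns of
   V_{m+1} are VEC_VV(0..m), and Hbar_m is the (m+1) x m Hessenberg matrix (kept unrotated
   in HES; HH holds the copy that the plane rotations below triangularize). */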
160: /* Now apply rotations to new col of hessenberg (and right side of system),
161: calculate new rotation, and get new residual norm at the same time*/
162: PetscCall(KSPFGMRESUpdateHessenberg(ksp, loc_it, hapend, &res_norm));
163: if (ksp->reason) break;
165: loc_it++;
166: fgmres->it = (loc_it - 1); /* Add this here in case it has converged */
168: PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp));
169: ksp->its++;
170: ksp->rnorm = res_norm;
171: PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp));
173: PetscCall((*ksp->converged)(ksp, ksp->its, res_norm, &ksp->reason, ksp->cnvP));
175: /* Catch error in happy breakdown and signal convergence and break from loop */
176: if (hapend) {
177: if (!ksp->reason) {
178: PetscCheck(!ksp->errorifnotconverged, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "Reached happy break down, but convergence was not indicated. Residual norm = %g", (double)res_norm);
179: ksp->reason = KSP_DIVERGED_BREAKDOWN;
180: break;
181: }
182: }
183: }
184: /* END OF ITERATION LOOP */
186: /*
187: Monitor if we know that we will not return for a restart */
188: if (loc_it && (ksp->reason || ksp->its >= ksp->max_it)) {
189: PetscCall(KSPMonitor(ksp, ksp->its, res_norm));
190: PetscCall(KSPLogResidualHistory(ksp, res_norm));
191: }
193: if (itcount) *itcount = loc_it;
195: /*
196: Down here we solve for the "best" coefficients of the Krylov
197: columns and add the correction to the current solution; since FGMRES stores
198: the preconditioned directions (PREVEC), no unwinding of the preconditioning is needed
199: */
201: /* Form the solution (or the solution so far) */
202: /* Note: must pass in (loc_it-1) for iteration count so that KSPFGMRESBuildSoln
203: properly navigates */
205: PetscCall(KSPFGMRESBuildSoln(RS(0), ksp->vec_sol, ksp->vec_sol, ksp, loc_it - 1));
206: PetscFunctionReturn(PETSC_SUCCESS);
207: }
209: static PetscErrorCode KSPSolve_FGMRES(KSP ksp)
210: {
211: PetscInt cycle_its = 0; /* iterations done in a call to KSPFGMRESCycle */
212: KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
213: PetscBool diagonalscale;
215: PetscFunctionBegin;
216: PetscCall(PCGetDiagonalScale(ksp->pc, &diagonalscale));
217: PetscCheck(!diagonalscale, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "Krylov method %s does not support diagonal scaling", ((PetscObject)ksp)->type_name);
219: PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp));
220: ksp->its = 0;
221: PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp));
223: /* Compute the initial (NOT preconditioned) residual */
224: if (!ksp->guess_zero) {
225: PetscCall(KSPFGMRESResidual(ksp));
226: } else { /* guess is 0 so residual is F (which is in ksp->vec_rhs) */
227: PetscCall(VecCopy(ksp->vec_rhs, VEC_VV(0)));
228: }
229: /* This may be true only on a subset of MPI ranks; setting it here so it will be detected by the first norm computation in the Krylov method */
230: if (ksp->reason == KSP_DIVERGED_PC_FAILED) PetscCall(VecSetInf(VEC_VV(0)));
232: /* now the residual is in VEC_VV(0) - which is what
233: KSPFGMRESCycle expects... */
235: PetscCall(KSPFGMRESCycle(&cycle_its, ksp));
236: while (!ksp->reason) {
237: PetscCall(KSPFGMRESResidual(ksp));
238: if (ksp->its >= ksp->max_it) break;
239: PetscCall(KSPFGMRESCycle(&cycle_its, ksp));
240: }
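/* Each pass through the loop above is one restart: KSPFGMRESResidual() recomputes the true
   (unpreconditioned) residual from the current iterate and KSPFGMRESCycle() runs another cycle
   of at most max_k directions, matching the note at the top of this file that restarts are
   simply solves with a nonzero x0. */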
241: /* mark lack of convergence */
242: if (ksp->its >= ksp->max_it && !ksp->reason) ksp->reason = KSP_DIVERGED_ITS;
243: PetscFunctionReturn(PETSC_SUCCESS);
244: }
246: extern PetscErrorCode KSPReset_FGMRES(KSP);
248: static PetscErrorCode KSPDestroy_FGMRES(KSP ksp)
249: {
250: PetscFunctionBegin;
251: PetscCall(KSPReset_FGMRES(ksp));
252: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFGMRESSetModifyPC_C", NULL));
253: PetscCall(KSPDestroy_GMRES(ksp));
254: PetscFunctionReturn(PETSC_SUCCESS);
255: }
257: static PetscErrorCode KSPFGMRESBuildSoln(PetscScalar *nrs, Vec vguess, Vec vdest, KSP ksp, PetscInt it)
258: {
259: PetscScalar tt;
260: PetscInt ii, k, j;
261: KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);
263: PetscFunctionBegin;
264: /* Solve for solution vector that minimizes the residual */
266: /* If it is < 0, no fgmres steps have been performed */
267: if (it < 0) {
268: PetscCall(VecCopy(vguess, vdest)); /* VecCopy() is smart, exits immediately if vguess == vdest */
269: PetscFunctionReturn(PETSC_SUCCESS);
270: }
272: /* so fgmres steps HAVE been performed */
274: /* solve the upper triangular system - RS is the right side and HH is
275: the upper triangular matrix - put soln in nrs */
276: if (*HH(it, it) != 0.0) {
277: nrs[it] = *RS(it) / *HH(it, it);
278: } else {
279: nrs[it] = 0.0;
280: }
281: for (ii = 1; ii <= it; ii++) {
282: k = it - ii;
283: tt = *RS(k);
284: for (j = k + 1; j <= it; j++) tt = tt - *HH(k, j) * nrs[j];
285: nrs[k] = tt / *HH(k, k);
286: }
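/* In formulas, the loop above is the standard back-substitution
     nrs[k] = ( RS(k) - sum_{j=k+1..it} HH(k,j)*nrs[j] ) / HH(k,k),   k = it-1, ..., 0
   applied to the triangularized Hessenberg system (the k = it entry was computed just above). */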
288: /* Accumulate the correction to the soln of the preconditioned prob. in
289: VEC_TEMP - note that we use the preconditioned vectors */
290: PetscCall(VecMAXPBY(VEC_TEMP, it + 1, nrs, 0, &PREVEC(0)));
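/* VecMAXPBY(y, n, alpha, beta, x) computes y = beta*y + sum_i alpha[i]*x[i]; with beta = 0 this
   forms the correction sum_k nrs[k]*PREVEC(k) in VEC_TEMP. Using the stored preconditioned
   directions here (rather than unwinding M^{-1} as right-preconditioned GMRES would) is what
   makes the method "flexible". */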
292: /* put updated solution into vdest.*/
293: if (vdest != vguess) {
294: PetscCall(VecCopy(VEC_TEMP, vdest));
295: PetscCall(VecAXPY(vdest, 1.0, vguess));
296: } else { /* replace guess with solution */
297: PetscCall(VecAXPY(vdest, 1.0, VEC_TEMP));
298: }
299: PetscFunctionReturn(PETSC_SUCCESS);
300: }
302: static PetscErrorCode KSPFGMRESUpdateHessenberg(KSP ksp, PetscInt it, PetscBool hapend, PetscReal *res)
303: {
304: PetscScalar *hh, *cc, *ss, tt;
305: PetscInt j;
306: KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);
308: PetscFunctionBegin;
309: hh = HH(0, it); /* pointer to beginning of column to update - so
310: incrementing hh "steps down" the (it+1)th col of HH*/
311: cc = CC(0); /* beginning of cosine rotations */
312: ss = SS(0); /* beginning of sine rotations */
314: /* Apply all the previously computed plane rotations to the new column
315: of the Hessenberg matrix */
316: /* Note: this uses the rotation [conj(c) s ; -s c], c= cos(theta), s= sin(theta),
317: and some refs have [c s ; -conj(s) c] (don't be confused!) */
319: for (j = 1; j <= it; j++) {
320: tt = *hh;
321: *hh = PetscConj(*cc) * tt + *ss * *(hh + 1);
322: hh++;
323: *hh = *cc++ * *hh - (*ss++ * tt);
324: /* hh, cc, and ss have each been incremented by one by the end of the loop */
325: }
327: /*
328: compute the new plane rotation, and apply it to:
329: 1) the right-hand-side of the Hessenberg system (RS)
330: note: it affects RS(it) and RS(it+1)
331: 2) the new column of the Hessenberg matrix
332: note: it affects HH(it,it) which is currently pointed to
333: by hh and HH(it+1, it) (*(hh+1))
334: thus obtaining the updated value of the residual...
335: */
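/* Concretely, writing h1 = HH(it,it) and h2 = HH(it+1,it) (h2 is real: it is the norm tt set
   earlier and is untouched by the previous rotations), the code below forms
     t = sqrt(|h1|^2 + |h2|^2),  c = h1/t,  s = h2/t,
   so that conj(c)*h1 + s*h2 = t and -s*h1 + c*h2 = 0, i.e. the subdiagonal entry is zeroed,
   while the right-hand side is updated as RS(it+1) = -s*RS(it), RS(it) = conj(c)*RS(it),
   making |RS(it+1)| the updated residual norm. */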
337: /* compute new plane rotation */
339: if (!hapend) {
340: tt = PetscSqrtScalar(PetscConj(*hh) * *hh + PetscConj(*(hh + 1)) * *(hh + 1));
341: if (tt == 0.0) {
342: ksp->reason = KSP_DIVERGED_NULL;
343: PetscFunctionReturn(PETSC_SUCCESS);
344: }
346: *cc = *hh / tt; /* new cosine value */
347: *ss = *(hh + 1) / tt; /* new sine value */
349: /* apply to 1) and 2) */
350: *RS(it + 1) = -(*ss * *RS(it));
351: *RS(it) = PetscConj(*cc) * *RS(it);
352: *hh = PetscConj(*cc) * *hh + *ss * *(hh + 1);
354: /* residual is the last element (it+1) of right-hand side! */
355: *res = PetscAbsScalar(*RS(it + 1));
357: } else { /* happy breakdown: HH(it+1, it) = 0, therefore we don't need to apply
358: another rotation matrix (so RS doesn't change). The new residual is
359: always the new sine term times the residual from last time (RS(it)),
360: but now the new sine rotation would be zero...so the residual should
361: be zero...so we will multiply "zero" by the last residual. This might
362: not be exactly what we want to do here - we could just return "zero". */
364: *res = 0.0;
365: }
366: PetscFunctionReturn(PETSC_SUCCESS);
367: }
369: static PetscErrorCode KSPFGMRESGetNewVectors(KSP ksp, PetscInt it)
370: {
371: KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
372: PetscInt nwork = fgmres->nwork_alloc; /* number of work vector chunks allocated */
373: PetscInt nalloc; /* number to allocate */
374: PetscInt k;
376: PetscFunctionBegin;
377: nalloc = fgmres->delta_allocate; /* number of vectors to allocate
378: in a single chunk */
380: /* Adjust the number to allocate to make sure that we don't exceed the
381: number of available slots (fgmres->vecs_allocated)*/
382: if (it + VEC_OFFSET + nalloc >= fgmres->vecs_allocated) nalloc = fgmres->vecs_allocated - it - VEC_OFFSET;
383: if (!nalloc) PetscFunctionReturn(PETSC_SUCCESS);
385: fgmres->vv_allocated += nalloc; /* vv_allocated is the number of vectors allocated */
387: /* work vectors */
388: PetscCall(KSPCreateVecs(ksp, nalloc, &fgmres->user_work[nwork], 0, NULL));
389: for (k = 0; k < nalloc; k++) fgmres->vecs[it + VEC_OFFSET + k] = fgmres->user_work[nwork][k];
390: /* specify size of chunk allocated */
391: fgmres->mwork_alloc[nwork] = nalloc;
393: /* preconditioned vectors */
394: PetscCall(KSPCreateVecs(ksp, nalloc, &fgmres->prevecs_user_work[nwork], 0, NULL));
395: for (k = 0; k < nalloc; k++) fgmres->prevecs[it + k] = fgmres->prevecs_user_work[nwork][k];
397: /* increment the number of work vector chunks */
398: fgmres->nwork_alloc++;
399: PetscFunctionReturn(PETSC_SUCCESS);
400: }
402: static PetscErrorCode KSPBuildSolution_FGMRES(KSP ksp, Vec ptr, Vec *result)
403: {
404: KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
406: PetscFunctionBegin;
407: if (!ptr) {
408: if (!fgmres->sol_temp) PetscCall(VecDuplicate(ksp->vec_sol, &fgmres->sol_temp));
409: ptr = fgmres->sol_temp;
410: }
411: if (!fgmres->nrs) {
412: /* allocate the work area */
413: PetscCall(PetscMalloc1(fgmres->max_k, &fgmres->nrs));
414: }
416: PetscCall(KSPFGMRESBuildSoln(fgmres->nrs, ksp->vec_sol, ptr, ksp, fgmres->it));
417: if (result) *result = ptr;
418: PetscFunctionReturn(PETSC_SUCCESS);
419: }
421: static PetscErrorCode KSPSetFromOptions_FGMRES(KSP ksp, PetscOptionItems *PetscOptionsObject)
422: {
423: PetscBool flg;
425: PetscFunctionBegin;
426: PetscCall(KSPSetFromOptions_GMRES(ksp, PetscOptionsObject));
427: PetscOptionsHeadBegin(PetscOptionsObject, "KSP flexible GMRES Options");
428: PetscCall(PetscOptionsBoolGroupBegin("-ksp_fgmres_modifypcnochange", "do not vary the preconditioner", "KSPFGMRESSetModifyPC", &flg));
429: if (flg) PetscCall(KSPFGMRESSetModifyPC(ksp, KSPFGMRESModifyPCNoChange, NULL, NULL));
430: PetscCall(PetscOptionsBoolGroupEnd("-ksp_fgmres_modifypcksp", "vary the KSP based preconditioner", "KSPFGMRESSetModifyPC", &flg));
431: if (flg) PetscCall(KSPFGMRESSetModifyPC(ksp, KSPFGMRESModifyPCKSP, NULL, NULL));
432: PetscOptionsHeadEnd();
433: PetscFunctionReturn(PETSC_SUCCESS);
434: }
436: typedef PetscErrorCode (*FCN1)(KSP, PetscInt, PetscInt, PetscReal, void *); /* force argument to next function to not be extern C*/
437: typedef PetscErrorCode (*FCN2)(void *);
439: static PetscErrorCode KSPFGMRESSetModifyPC_FGMRES(KSP ksp, FCN1 fcn, void *ctx, FCN2 d)
440: {
441: PetscFunctionBegin;
443: ((KSP_FGMRES *)ksp->data)->modifypc = fcn;
444: ((KSP_FGMRES *)ksp->data)->modifydestroy = d;
445: ((KSP_FGMRES *)ksp->data)->modifyctx = ctx;
446: PetscFunctionReturn(PETSC_SUCCESS);
447: }
449: PetscErrorCode KSPReset_FGMRES(KSP ksp)
450: {
451: KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
452: PetscInt i;
454: PetscFunctionBegin;
455: PetscCall(PetscFree(fgmres->prevecs));
456: if (fgmres->nwork_alloc > 0) {
457: i = 0;
458: /* In the first allocation we allocated VEC_OFFSET fewer vectors in prevecs */
459: PetscCall(VecDestroyVecs(fgmres->mwork_alloc[i] - VEC_OFFSET, &fgmres->prevecs_user_work[i]));
460: for (i = 1; i < fgmres->nwork_alloc; i++) PetscCall(VecDestroyVecs(fgmres->mwork_alloc[i], &fgmres->prevecs_user_work[i]));
461: }
462: PetscCall(PetscFree(fgmres->prevecs_user_work));
463: if (fgmres->modifydestroy) PetscCall((*fgmres->modifydestroy)(fgmres->modifyctx));
464: PetscCall(KSPReset_GMRES(ksp));
465: PetscFunctionReturn(PETSC_SUCCESS);
466: }
468: static PetscErrorCode KSPGMRESSetRestart_FGMRES(KSP ksp, PetscInt max_k)
469: {
470: KSP_FGMRES *gmres = (KSP_FGMRES *)ksp->data;
472: PetscFunctionBegin;
473: PetscCheck(max_k >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Restart must be positive");
474: if (!ksp->setupstage) {
475: gmres->max_k = max_k;
476: } else if (gmres->max_k != max_k) {
477: gmres->max_k = max_k;
478: ksp->setupstage = KSP_SETUP_NEW;
479: /* free the data structures, then create them again */
480: PetscCall(KSPReset_FGMRES(ksp));
481: }
482: PetscFunctionReturn(PETSC_SUCCESS);
483: }
485: static PetscErrorCode KSPGMRESGetRestart_FGMRES(KSP ksp, PetscInt *max_k)
486: {
487: KSP_FGMRES *gmres = (KSP_FGMRES *)ksp->data;
489: PetscFunctionBegin;
490: *max_k = gmres->max_k;
491: PetscFunctionReturn(PETSC_SUCCESS);
492: }
494: /*MC
495: KSPFGMRES - Implements the Flexible Generalized Minimal Residual method. [](sec_flexibleksp)
497: Options Database Keys:
498: + -ksp_gmres_restart <restart> - the number of Krylov directions to orthogonalize against
499: . -ksp_gmres_haptol <tol> - sets the tolerance for "happy ending" (exact convergence)
500: . -ksp_gmres_preallocate - preallocate all the Krylov search directions initially (otherwise groups of vectors are allocated as needed)
501: . -ksp_gmres_classicalgramschmidt - use classical (unmodified) Gram-Schmidt to orthogonalize against the Krylov space (fast) (the default)
502: . -ksp_gmres_modifiedgramschmidt - use modified Gram-Schmidt in the orthogonalization (more stable, but slower)
503: . -ksp_gmres_cgs_refinement_type <refine_never,refine_ifneeded,refine_always> - determine if iterative refinement is used to increase the
504: stability of the classical Gram-Schmidt orthogonalization.
505: . -ksp_gmres_krylov_monitor - plot the Krylov space generated
506: . -ksp_fgmres_modifypcnochange - do not change the preconditioner between iterations
507: - -ksp_fgmres_modifypcksp - modify the preconditioner using `KSPFGMRESModifyPCKSP()`
509: Level: beginner
511: Notes:
512: See `KSPFGMRESSetModifyPC()` for how to vary the preconditioner between iterations
514: Only right preconditioning is supported.
516: The options `-ksp_type fgmres -pc_type ksp -ksp_ksp_type bcgs -ksp_view -ksp_pc_type jacobi` make the preconditioner (the inner solver)
517: BiCGStab, itself preconditioned with `PCJACOBI`.
519: Developer Note:
520: This object is subclassed off of `KSPGMRES`, see the source code in src/ksp/ksp/impls/gmres for comments on the structure of the code
522: Contributed by:
523: Allison Baker
525: .seealso: [](ch_ksp), [](sec_flexibleksp), `KSPCreate()`, `KSPSetType()`, `KSPType`, `KSP`, `KSPGMRES`, `KSPLGMRES`,
526: `KSPGMRESSetRestart()`, `KSPGMRESSetHapTol()`, `KSPGMRESSetPreAllocateVectors()`, `KSPGMRESSetOrthogonalization()`, `KSPGMRESGetOrthogonalization()`,
527: `KSPGMRESClassicalGramSchmidtOrthogonalization()`, `KSPGMRESModifiedGramSchmidtOrthogonalization()`,
528: `KSPGMRESCGSRefinementType`, `KSPGMRESSetCGSRefinementType()`, `KSPGMRESGetCGSRefinementType()`, `KSPGMRESMonitorKrylov()`, `KSPFGMRESSetModifyPC()`,
529: `KSPFGMRESModifyPCKSP()`
530: M*/
532: PETSC_EXTERN PetscErrorCode KSPCreate_FGMRES(KSP ksp)
533: {
534: KSP_FGMRES *fgmres;
536: PetscFunctionBegin;
537: PetscCall(PetscNew(&fgmres));
539: ksp->data = (void *)fgmres;
540: ksp->ops->buildsolution = KSPBuildSolution_FGMRES;
541: ksp->ops->setup = KSPSetUp_FGMRES;
542: ksp->ops->solve = KSPSolve_FGMRES;
543: ksp->ops->reset = KSPReset_FGMRES;
544: ksp->ops->destroy = KSPDestroy_FGMRES;
545: ksp->ops->view = KSPView_GMRES;
546: ksp->ops->setfromoptions = KSPSetFromOptions_FGMRES;
547: ksp->ops->computeextremesingularvalues = KSPComputeExtremeSingularValues_GMRES;
548: ksp->ops->computeeigenvalues = KSPComputeEigenvalues_GMRES;
550: PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 3));
551: PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_RIGHT, 1));
553: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetPreAllocateVectors_C", KSPGMRESSetPreAllocateVectors_GMRES));
554: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetOrthogonalization_C", KSPGMRESSetOrthogonalization_GMRES));
555: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetOrthogonalization_C", KSPGMRESGetOrthogonalization_GMRES));
556: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetRestart_C", KSPGMRESSetRestart_FGMRES));
557: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetRestart_C", KSPGMRESGetRestart_FGMRES));
558: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFGMRESSetModifyPC_C", KSPFGMRESSetModifyPC_FGMRES));
559: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetCGSRefinementType_C", KSPGMRESSetCGSRefinementType_GMRES));
560: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetCGSRefinementType_C", KSPGMRESGetCGSRefinementType_GMRES));
562: fgmres->haptol = 1.0e-30;
563: fgmres->q_preallocate = 0;
564: fgmres->delta_allocate = FGMRES_DELTA_DIRECTIONS;
565: fgmres->orthog = KSPGMRESClassicalGramSchmidtOrthogonalization;
566: fgmres->nrs = NULL;
567: fgmres->sol_temp = NULL;
568: fgmres->max_k = FGMRES_DEFAULT_MAXK;
569: fgmres->Rsvd = NULL;
570: fgmres->orthogwork = NULL;
571: fgmres->modifypc = KSPFGMRESModifyPCNoChange;
572: fgmres->modifyctx = NULL;
573: fgmres->modifydestroy = NULL;
574: fgmres->cgstype = KSP_GMRES_CGS_REFINE_NEVER;
575: PetscFunctionReturn(PETSC_SUCCESS);
576: }