Actual source code: bnk.c

  1: #include <petsctaolinesearch.h>
  2: #include <../src/tao/bound/impls/bnk/bnk.h>
  3: #include <petscksp.h>

  5: static const char *BNK_INIT[64] = {"constant", "direction", "interpolation"};
  6: static const char *BNK_UPDATE[64] = {"step", "reduction", "interpolation"};
  7: static const char *BNK_AS[64] = {"none", "bertsekas"};

  9: /*------------------------------------------------------------*/

 11: /* Routine for initializing the KSP solver, the BFGS preconditioner, and the initial trust-region radius estimate */

 13: PetscErrorCode TaoBNKInitialize(Tao tao, PetscInt initType, PetscBool *needH)
 14: {
 15:   TAO_BNK           *bnk = (TAO_BNK *)tao->data;
 16:   PC                pc;
 17:   PetscReal         f_min, ftrial, prered, actred, kappa, sigma, resnorm;
 18:   PetscReal         tau, tau_1, tau_2, tau_max, tau_min, max_radius;
 19:   PetscBool         is_bfgs, is_jacobi, is_symmetric, sym_set;
 20:   PetscInt          n, N, nDiff;
 21:   PetscInt          i_max = 5;
 22:   PetscInt          j_max = 1;
 23:   PetscInt          i, j;
 24:   PetscVoidFunction kspTR;

 26:   /* Compute the variable bounds and pass them to the embedded BNCG solver and the line search */
 27:   TaoComputeVariableBounds(tao);
 28:   TaoSetVariableBounds(bnk->bncg, tao->XL, tao->XU);
 29:   if (tao->bounded) {
 30:     TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);
 31:   }

 33:   /* Project the initial point onto the feasible region */
 34:   TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);

 36:   /* Check convergence criteria */
 37:   TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient);
 38:   TaoBNKEstimateActiveSet(tao, bnk->as_type);
 39:   VecCopy(bnk->unprojected_gradient, tao->gradient);
 40:   VecISSet(tao->gradient, bnk->active_idx, 0.0);
 41:   TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);

 43:   /* Test the initial point for convergence */
 44:   VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);
 45:   VecNorm(bnk->W, NORM_2, &resnorm);
 47:   TaoLogConvergenceHistory(tao,bnk->f,resnorm,0.0,tao->ksp_its);
 48:   TaoMonitor(tao,tao->niter,bnk->f,resnorm,0.0,1.0);
 49:   (*tao->ops->convergencetest)(tao,tao->cnvP);
 50:   if (tao->reason != TAO_CONTINUE_ITERATING) return 0;

 52:   /* Reset KSP stopping reason counters */
 53:   bnk->ksp_atol = 0;
 54:   bnk->ksp_rtol = 0;
 55:   bnk->ksp_dtol = 0;
 56:   bnk->ksp_ctol = 0;
 57:   bnk->ksp_negc = 0;
 58:   bnk->ksp_iter = 0;
 59:   bnk->ksp_othr = 0;

 61:   /* Reset accepted step type counters */
 62:   bnk->tot_cg_its = 0;
 63:   bnk->newt = 0;
 64:   bnk->bfgs = 0;
 65:   bnk->sgrad = 0;
 66:   bnk->grad = 0;

 68:   /* Initialize the Hessian perturbation */
 69:   bnk->pert = bnk->sval;

 71:   /* Reset the step direction to zero (this helps BNCG reset its direction internally) */
 72:   VecSet(tao->stepdirection, 0.0);

 74:   /* Allocate the vectors needed for the BFGS approximation */
 75:   KSPGetPC(tao->ksp, &pc);
 76:   PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_bfgs);
 77:   PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_jacobi);
 78:   if (is_bfgs) {
 79:     bnk->bfgs_pre = pc;
 80:     PCLMVMGetMatLMVM(bnk->bfgs_pre, &bnk->M);
 81:     VecGetLocalSize(tao->solution, &n);
 82:     VecGetSize(tao->solution, &N);
 83:     MatSetSizes(bnk->M, n, n, N, N);
 84:     MatLMVMAllocate(bnk->M, tao->solution, bnk->unprojected_gradient);
 85:     MatIsSymmetricKnown(bnk->M, &sym_set, &is_symmetric);
 87:   } else if (is_jacobi) {
 88:     PCJacobiSetUseAbs(pc,PETSC_TRUE);
 89:   }

 91:   /* Prepare the min/max vectors for safeguarding diagonal scales */
 92:   VecSet(bnk->Diag_min, bnk->dmin);
 93:   VecSet(bnk->Diag_max, bnk->dmax);

 95:   /* Initialize trust-region radius.  The initialization is only performed
 96:      when we are using Nash, Steihaug-Toint or the Generalized Lanczos method. */
 97:   *needH = PETSC_TRUE;
 98:   PetscObjectQueryFunction((PetscObject)tao->ksp,"KSPCGSetRadius_C",&kspTR);
 99:   if (kspTR) {
100:     switch (initType) {
101:     case BNK_INIT_CONSTANT:
102:       /* Use the initial radius specified */
103:       tao->trust = tao->trust0;
104:       break;

106:     case BNK_INIT_INTERPOLATION:
107:       /* Use interpolation based on the initial Hessian */
108:       max_radius = 0.0;
109:       tao->trust = tao->trust0;
110:       for (j = 0; j < j_max; ++j) {
111:         f_min = bnk->f;
112:         sigma = 0.0;

114:         if (*needH) {
115:           /* Compute the Hessian at the new step, and extract the inactive subsystem */
116:           (*bnk->computehessian)(tao);
117:           TaoBNKEstimateActiveSet(tao, BNK_AS_NONE);
118:           MatDestroy(&bnk->H_inactive);
119:           if (bnk->active_idx) {
120:             MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);
121:           } else {
122:             PetscObjectReference((PetscObject)tao->hessian);
123:             bnk->H_inactive = tao->hessian;
124:           }
125:           *needH = PETSC_FALSE;
126:         }

128:         for (i = 0; i < i_max; ++i) {
129:           /* Take a steepest descent step and snap it to bounds */
130:           VecCopy(tao->solution, bnk->Xold);
131:           VecAXPY(tao->solution, -tao->trust/bnk->gnorm, tao->gradient);
132:           TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);
133:           /* Compute the step we actually accepted */
134:           VecCopy(tao->solution, bnk->W);
135:           VecAXPY(bnk->W, -1.0, bnk->Xold);
136:           /* Compute the objective at the trial */
137:           TaoComputeObjective(tao, tao->solution, &ftrial);
139:           VecCopy(bnk->Xold, tao->solution);
140:           if (PetscIsInfOrNanReal(ftrial)) {
141:             tau = bnk->gamma1_i;
142:           } else {
143:             if (ftrial < f_min) {
144:               f_min = ftrial;
145:               sigma = -tao->trust / bnk->gnorm;
146:             }

148:             /* Compute the predicted and actual reduction */
149:             if (bnk->active_idx) {
150:               VecGetSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);
151:               VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
152:             } else {
153:               bnk->X_inactive = bnk->W;
154:               bnk->inactive_work = bnk->Xwork;
155:             }
156:             MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);
157:             VecDot(bnk->X_inactive, bnk->inactive_work, &prered);
158:             if (bnk->active_idx) {
159:               VecRestoreSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);
160:               VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
161:             }
162:             prered = tao->trust * (bnk->gnorm - 0.5 * tao->trust * prered / (bnk->gnorm * bnk->gnorm));
163:             actred = bnk->f - ftrial;
164:             if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
165:               kappa = 1.0;
166:             } else {
167:               kappa = actred / prered;
168:             }

170:             tau_1 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust + (1.0 - bnk->theta_i) * prered - actred);
171:             tau_2 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust - (1.0 + bnk->theta_i) * prered + actred);
172:             tau_min = PetscMin(tau_1, tau_2);
173:             tau_max = PetscMax(tau_1, tau_2);

175:             if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu1_i) {
176:               /*  Great agreement */
177:               max_radius = PetscMax(max_radius, tao->trust);

179:               if (tau_max < 1.0) {
180:                 tau = bnk->gamma3_i;
181:               } else if (tau_max > bnk->gamma4_i) {
182:                 tau = bnk->gamma4_i;
183:               } else {
184:                 tau = tau_max;
185:               }
186:             } else if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu2_i) {
187:               /*  Good agreement */
188:               max_radius = PetscMax(max_radius, tao->trust);

190:               if (tau_max < bnk->gamma2_i) {
191:                 tau = bnk->gamma2_i;
192:               } else if (tau_max > bnk->gamma3_i) {
193:                 tau = bnk->gamma3_i;
194:               } else {
195:                 tau = tau_max;
196:               }
197:             } else {
198:               /*  Not good agreement */
199:               if (tau_min > 1.0) {
200:                 tau = bnk->gamma2_i;
201:               } else if (tau_max < bnk->gamma1_i) {
202:                 tau = bnk->gamma1_i;
203:               } else if ((tau_min < bnk->gamma1_i) && (tau_max >= 1.0)) {
204:                 tau = bnk->gamma1_i;
205:               } else if ((tau_1 >= bnk->gamma1_i) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
206:                 tau = tau_1;
207:               } else if ((tau_2 >= bnk->gamma1_i) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
208:                 tau = tau_2;
209:               } else {
210:                 tau = tau_max;
211:               }
212:             }
213:           }
214:           tao->trust = tau * tao->trust;
215:         }

217:         if (f_min < bnk->f) {
218:           /* We accidentally found a solution better than the initial, so accept it */
219:           bnk->f = f_min;
220:           VecCopy(tao->solution, bnk->Xold);
221:           VecAXPY(tao->solution,sigma,tao->gradient);
222:           TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);
223:           VecCopy(tao->solution, tao->stepdirection);
224:           VecAXPY(tao->stepdirection, -1.0, bnk->Xold);
225:           TaoComputeGradient(tao,tao->solution,bnk->unprojected_gradient);
226:           TaoBNKEstimateActiveSet(tao, bnk->as_type);
227:           VecCopy(bnk->unprojected_gradient, tao->gradient);
228:           VecISSet(tao->gradient, bnk->active_idx, 0.0);
229:           /* Compute the gradient norm at the new iterate and flip the switch to recompute the Hessian later */
230:           TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);
231:           *needH = PETSC_TRUE;
232:           /* Test the new step for convergence */
233:           VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);
234:           VecNorm(bnk->W, NORM_2, &resnorm);
236:           TaoLogConvergenceHistory(tao,bnk->f,resnorm,0.0,tao->ksp_its);
237:           TaoMonitor(tao,tao->niter,bnk->f,resnorm,0.0,1.0);
238:           (*tao->ops->convergencetest)(tao,tao->cnvP);
239:           if (tao->reason != TAO_CONTINUE_ITERATING) return 0;
240:           /* Activate BNCG recycling early because we already have a step direction computed */
241:           TaoSetRecycleHistory(bnk->bncg, PETSC_TRUE);
242:         }
243:       }
244:       tao->trust = PetscMax(tao->trust, max_radius);

246:       /* Ensure that the trust radius is within the limits */
247:       tao->trust = PetscMax(tao->trust, bnk->min_radius);
248:       tao->trust = PetscMin(tao->trust, bnk->max_radius);
249:       break;

251:     default:
252:       /* Norm of the first direction will initialize radius */
253:       tao->trust = 0.0;
254:       break;
255:     }
256:   }
257:   return 0;
258: }
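/* Illustrative note (not from the PETSc sources): the interpolation loop above is driven by the
   agreement ratio between the actual and the predicted reduction,

       kappa = actred / prered = (f(x) - f(x + s)) / (model reduction for the trial step s),

   where kappa near 1 means the local quadratic model is trustworthy, so tau > 1 and the radius
   grows, while poor agreement selects tau < 1 and shrinks it.  Roughly, tau_1 and tau_2 are
   candidate rescalings of the radius chosen so that the predicted and actual reductions would have
   agreed to within the interpolation factor theta_i, and the gamma*_i parameters clamp the final
   choice of tau. */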

260: /*------------------------------------------------------------*/

262: /* Routine for computing the exact Hessian and preparing the preconditioner at the new iterate */

264: PetscErrorCode TaoBNKComputeHessian(Tao tao)
265: {
266:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;

268:   /* Compute the Hessian */
269:   TaoComputeHessian(tao,tao->solution,tao->hessian,tao->hessian_pre);
270:   /* Add a correction to the BFGS preconditioner */
271:   if (bnk->M) {
272:     MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
273:   }
274:   /* Prepare the reduced sub-matrices for the inactive set */
275:   MatDestroy(&bnk->Hpre_inactive);
276:   MatDestroy(&bnk->H_inactive);
277:   if (bnk->active_idx) {
278:     MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);
279:     if (tao->hessian == tao->hessian_pre) {
280:       PetscObjectReference((PetscObject)bnk->H_inactive);
281:       bnk->Hpre_inactive = bnk->H_inactive;
282:     } else {
283:       MatCreateSubMatrix(tao->hessian_pre, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->Hpre_inactive);
284:     }
285:     if (bnk->bfgs_pre) {
286:       PCLMVMSetIS(bnk->bfgs_pre, bnk->inactive_idx);
287:     }
288:   } else {
289:     PetscObjectReference((PetscObject)tao->hessian);
290:     bnk->H_inactive = tao->hessian;
291:     if (tao->hessian == tao->hessian_pre) {
292:       PetscObjectReference((PetscObject)bnk->H_inactive);
293:       bnk->Hpre_inactive = bnk->H_inactive;
294:     } else {
295:       PetscObjectReference((PetscObject)tao->hessian_pre);
296:       bnk->Hpre_inactive = tao->hessian_pre;
297:     }
298:     if (bnk->bfgs_pre) {
299:       PCLMVMClearIS(bnk->bfgs_pre);
300:     }
301:   }
302:   return 0;
303: }

305: /*------------------------------------------------------------*/

307: /* Routine for estimating the active set */

309: PetscErrorCode TaoBNKEstimateActiveSet(Tao tao, PetscInt asType)
310: {
312:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
313:   PetscBool      hessComputed, diagExists;

315:   switch (asType) {
316:   case BNK_AS_NONE:
317:     ISDestroy(&bnk->inactive_idx);
318:     VecWhichInactive(tao->XL, tao->solution, bnk->unprojected_gradient, tao->XU, PETSC_TRUE, &bnk->inactive_idx);
319:     ISDestroy(&bnk->active_idx);
320:     ISComplementVec(bnk->inactive_idx, tao->solution, &bnk->active_idx);
321:     break;

323:   case BNK_AS_BERTSEKAS:
324:     /* Compute the trial step vector with which we will estimate the active set at the next iteration */
325:     if (bnk->M) {
326:       /* If the BFGS preconditioner matrix is available, we will construct a trial step with it */
327:       MatSolve(bnk->M, bnk->unprojected_gradient, bnk->W);
328:     } else {
329:       hessComputed = diagExists = PETSC_FALSE;
330:       if (tao->hessian) {
331:         MatAssembled(tao->hessian, &hessComputed);
332:       }
333:       if (hessComputed) {
334:         MatHasOperation(tao->hessian, MATOP_GET_DIAGONAL, &diagExists);
335:       }
336:       if (diagExists) {
337:         /* The BFGS preconditioner is not available, so apply the inverse of the safeguarded absolute Hessian diagonal to the gradient instead */
338:         MatGetDiagonal(tao->hessian, bnk->Xwork);
339:         VecAbs(bnk->Xwork);
340:         VecMedian(bnk->Diag_min, bnk->Xwork, bnk->Diag_max, bnk->Xwork);
341:         VecReciprocal(bnk->Xwork);
342:         VecPointwiseMult(bnk->W, bnk->Xwork, bnk->unprojected_gradient);
343:       } else {
344:         /* If the Hessian or its diagonal is not available, simply use the gradient step */
345:         VecCopy(bnk->unprojected_gradient, bnk->W);
346:       }
347:     }
348:     VecScale(bnk->W, -1.0);
349:     TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, bnk->unprojected_gradient, bnk->W, bnk->Xwork, bnk->as_step, &bnk->as_tol,
350:                                    &bnk->active_lower, &bnk->active_upper, &bnk->active_fixed, &bnk->active_idx, &bnk->inactive_idx);
351:     break;

353:   default:
354:     break;
355:   }
356:   return 0;
357: }
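/* Illustrative sketch (not from the PETSc sources): the entrywise idea behind the Bertsekas
   estimate used above, written for plain arrays.  A variable is flagged active when it sits within
   a tolerance eps of a bound and the gradient pushes it further into that bound.  The actual
   TaoEstimateActiveBounds() call additionally uses the scaled trial step W and an adaptive
   tolerance, and returns index sets rather than flags; the helper below is hypothetical and not a
   PETSc API. */
static void BNKBertsekasActiveFlagsSketch(PetscInt n, const PetscReal x[], const PetscReal xl[], const PetscReal xu[], const PetscReal g[], PetscReal eps, PetscBool active[])
{
  PetscInt i;

  for (i = 0; i < n; ++i) {
    PetscBool at_lower = (PetscBool)(x[i] - xl[i] <= eps && g[i] > 0.0); /* gradient pushes x[i] below the lower bound */
    PetscBool at_upper = (PetscBool)(xu[i] - x[i] <= eps && g[i] < 0.0); /* gradient pushes x[i] above the upper bound */

    active[i] = (PetscBool)(at_lower || at_upper);
  }
}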

359: /*------------------------------------------------------------*/

361: /* Routine for bounding the step direction */

363: PetscErrorCode TaoBNKBoundStep(Tao tao, PetscInt asType, Vec step)
364: {
365:   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;

367:   switch (asType) {
368:   case BNK_AS_NONE:
369:     VecISSet(step, bnk->active_idx, 0.0);
370:     break;

372:   case BNK_AS_BERTSEKAS:
373:     TaoBoundStep(tao->solution, tao->XL, tao->XU, bnk->active_lower, bnk->active_upper, bnk->active_fixed, 1.0, step);
374:     break;

376:   default:
377:     break;
378:   }
379:   return 0;
380: }

382: /*------------------------------------------------------------*/

384: /* Routine for taking a finite number of BNCG iterations to
385:    accelerate Newton convergence.

387:    In practice, this approach simply trades off Hessian evaluations
388:    for more gradient evaluations.
389: */

391: PetscErrorCode TaoBNKTakeCGSteps(Tao tao, PetscBool *terminate)
392: {
393:   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;

395:   *terminate = PETSC_FALSE;
396:   if (bnk->max_cg_its > 0) {
397:     /* Copy the current function value (important vectors are already shared) */
398:     bnk->bncg_ctx->f = bnk->f;
399:     /* Take some small finite number of BNCG iterations */
400:     TaoSolve(bnk->bncg);
401:     /* Add the number of gradient and function evaluations to the total */
402:     tao->nfuncs += bnk->bncg->nfuncs;
403:     tao->nfuncgrads += bnk->bncg->nfuncgrads;
404:     tao->ngrads += bnk->bncg->ngrads;
405:     tao->nhess += bnk->bncg->nhess;
406:     bnk->tot_cg_its += bnk->bncg->niter;
407:     /* Extract the BNCG function value out and save it into BNK */
408:     bnk->f = bnk->bncg_ctx->f;
409:     if (bnk->bncg->reason == TAO_CONVERGED_GATOL || bnk->bncg->reason == TAO_CONVERGED_GRTOL || bnk->bncg->reason == TAO_CONVERGED_GTTOL || bnk->bncg->reason == TAO_CONVERGED_MINF) {
410:       *terminate = PETSC_TRUE;
411:     } else {
412:       TaoBNKEstimateActiveSet(tao, bnk->as_type);
413:     }
414:   }
415:   return 0;
416: }
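/* Illustrative note (not from the PETSc sources): this acceleration is active only when
   -tao_bnk_max_cg_its is set to a positive value; the embedded BNCG solver then runs for at most
   that many iterations per outer Newton iteration, reusing the solution, gradient, and step
   vectors shared with it in TaoSetUp_BNK() so that no extra copies are needed. */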

418: /*------------------------------------------------------------*/

420: /* Routine for computing the Newton step. */

422: PetscErrorCode TaoBNKComputeStep(Tao tao, PetscBool shift, KSPConvergedReason *ksp_reason, PetscInt *step_type)
423: {
424:   TAO_BNK           *bnk = (TAO_BNK *)tao->data;
425:   PetscInt          bfgsUpdates = 0;
426:   PetscInt          kspits;
427:   PetscBool         is_lmvm;
428:   PetscVoidFunction kspTR;

430:   /* If there are no inactive variables left, save some computation and return an adjusted zero step
431:      that has (l-x) and (u-x) for lower and upper bounded variables. */
432:   if (!bnk->inactive_idx) {
433:     VecSet(tao->stepdirection, 0.0);
434:     TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
435:     return 0;
436:   }

438:   /* Shift the reduced Hessian matrix */
439:   if (shift && bnk->pert > 0) {
440:     PetscObjectTypeCompare((PetscObject)tao->hessian, MATLMVM, &is_lmvm);
441:     if (is_lmvm) {
442:       MatShift(tao->hessian, bnk->pert);
443:     } else {
444:       MatShift(bnk->H_inactive, bnk->pert);
445:       if (bnk->H_inactive != bnk->Hpre_inactive) {
446:         MatShift(bnk->Hpre_inactive, bnk->pert);
447:       }
448:     }
449:   }

451:   /* Solve the Newton system of equations */
452:   tao->ksp_its = 0;
453:   VecSet(tao->stepdirection, 0.0);
454:   KSPReset(tao->ksp);
455:   KSPResetFromOptions(tao->ksp);
456:   KSPSetOperators(tao->ksp,bnk->H_inactive,bnk->Hpre_inactive);
457:   VecCopy(bnk->unprojected_gradient, bnk->Gwork);
458:   if (bnk->active_idx) {
459:     VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
460:     VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
461:   } else {
462:     bnk->G_inactive = bnk->unprojected_gradient;
463:     bnk->X_inactive = tao->stepdirection;
464:   }
465:   PetscObjectQueryFunction((PetscObject)tao->ksp,"KSPCGSetRadius_C",&kspTR);
466:   if (kspTR) {
467:     KSPCGSetRadius(tao->ksp,tao->trust);
468:     KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
469:     KSPGetIterationNumber(tao->ksp,&kspits);
470:     tao->ksp_its += kspits;
471:     tao->ksp_tot_its += kspits;
472:     KSPCGGetNormD(tao->ksp,&bnk->dnorm);

474:     if (0.0 == tao->trust) {
475:       /* Radius was uninitialized; use the norm of the direction */
476:       if (bnk->dnorm > 0.0) {
477:         tao->trust = bnk->dnorm;

479:         /* Modify the radius if it is too large or small */
480:         tao->trust = PetscMax(tao->trust, bnk->min_radius);
481:         tao->trust = PetscMin(tao->trust, bnk->max_radius);
482:       } else {
483:         /* The direction was bad; set radius to default value and re-solve
484:            the trust-region subproblem to get a direction */
485:         tao->trust = tao->trust0;

487:         /* Modify the radius if it is too large or small */
488:         tao->trust = PetscMax(tao->trust, bnk->min_radius);
489:         tao->trust = PetscMin(tao->trust, bnk->max_radius);

491:         KSPCGSetRadius(tao->ksp,tao->trust);
492:         KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
493:         KSPGetIterationNumber(tao->ksp,&kspits);
494:         tao->ksp_its += kspits;
495:         tao->ksp_tot_its += kspits;
496:         KSPCGGetNormD(tao->ksp,&bnk->dnorm);

499:       }
500:     }
501:   } else {
502:     KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);
503:     KSPGetIterationNumber(tao->ksp, &kspits);
504:     tao->ksp_its += kspits;
505:     tao->ksp_tot_its += kspits;
506:   }
507:   /* Restore sub vectors back */
508:   if (bnk->active_idx) {
509:     VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
510:     VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
511:   }
512:   /* Negate the solution to obtain a descent direction and adjust the step for the actively bounded variables */
513:   VecScale(tao->stepdirection, -1.0);
514:   TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);

516:   /* Record convergence reasons */
517:   KSPGetConvergedReason(tao->ksp, ksp_reason);
518:   if (KSP_CONVERGED_ATOL == *ksp_reason) {
519:     ++bnk->ksp_atol;
520:   } else if (KSP_CONVERGED_RTOL == *ksp_reason) {
521:     ++bnk->ksp_rtol;
522:   } else if (KSP_CONVERGED_CG_CONSTRAINED == *ksp_reason) {
523:     ++bnk->ksp_ctol;
524:   } else if (KSP_CONVERGED_CG_NEG_CURVE == *ksp_reason) {
525:     ++bnk->ksp_negc;
526:   } else if (KSP_DIVERGED_DTOL == *ksp_reason) {
527:     ++bnk->ksp_dtol;
528:   } else if (KSP_DIVERGED_ITS == *ksp_reason) {
529:     ++bnk->ksp_iter;
530:   } else {
531:     ++bnk->ksp_othr;
532:   }

534:   /* Make sure the BFGS preconditioner is healthy */
535:   if (bnk->M) {
536:     MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
537:     if ((KSP_DIVERGED_INDEFINITE_PC == *ksp_reason) && (bfgsUpdates > 0)) {
538:       /* Preconditioner is numerically indefinite; reset the approximation. */
539:       MatLMVMReset(bnk->M, PETSC_FALSE);
540:       MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
541:     }
542:   }
543:   *step_type = BNK_NEWTON;
544:   return 0;
545: }
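/* Illustrative note (not from the PETSc sources): writing I for the inactive (free) variables and
   A for the actively bounded ones, the routine above effectively solves the reduced, shifted system

       (H_II + pert * Id) d_I = g_I,    followed by    d <- -d,

   with d_A then adjusted by TaoBNKBoundStep (zeroed, or clipped to the distance to the bounds).
   When the KSP supports KSPCGSetRadius() (Nash, Steihaug-Toint, or generalized Lanczos solvers),
   the solve additionally enforces the trust-region constraint ||d_I|| <= tao->trust. */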

547: /*------------------------------------------------------------*/

549: /* Routine for recomputing the predicted reduction for a given step vector */

551: PetscErrorCode TaoBNKRecomputePred(Tao tao, Vec S, PetscReal *prered)
552: {
553:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;

555:   /* Extract subvectors associated with the inactive set */
556:   if (bnk->active_idx) {
557:     VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
558:     VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
559:     VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
560:   } else {
561:     bnk->X_inactive = tao->stepdirection;
562:     bnk->inactive_work = bnk->Xwork;
563:     bnk->G_inactive = bnk->Gwork;
564:   }
565:   /* Recompute the predicted decrease based on the quadratic model */
566:   MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);
567:   VecAYPX(bnk->inactive_work, -0.5, bnk->G_inactive);
568:   VecDot(bnk->inactive_work, bnk->X_inactive, prered);
569:   /* Restore the sub vectors */
570:   if (bnk->active_idx) {
571:     VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);
572:     VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);
573:     VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);
574:   }
575:   return 0;
576: }

578: /*------------------------------------------------------------*/

580: /* Routine for ensuring that the Newton step is a descent direction.

582:    The step direction falls back onto BFGS, scaled gradient and gradient steps
583:    in the event that the Newton step fails the test.
584: */

586: PetscErrorCode TaoBNKSafeguardStep(Tao tao, KSPConvergedReason ksp_reason, PetscInt *stepType)
587: {
588:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
589:   PetscReal      gdx, e_min;
590:   PetscInt       bfgsUpdates;

592:   switch (*stepType) {
593:   case BNK_NEWTON:
594:     VecDot(tao->stepdirection, tao->gradient, &gdx);
595:     if ((gdx >= 0.0) || PetscIsInfOrNanReal(gdx)) {
596:       /* Newton step is not descent or direction produced Inf or NaN
597:         Update the perturbation for next time */
598:       if (bnk->pert <= 0.0) {
599:         PetscBool is_gltr;

601:         /* Initialize the perturbation */
602:         bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
603:         PetscObjectTypeCompare((PetscObject)(tao->ksp),KSPGLTR,&is_gltr);
604:         if (is_gltr) {
605:           KSPGLTRGetMinEig(tao->ksp,&e_min);
606:           bnk->pert = PetscMax(bnk->pert, -e_min);
607:         }
608:       } else {
609:         /* Increase the perturbation */
610:         bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
611:       }

613:       if (!bnk->M) {
614:         /* We don't have the BFGS matrix available and updated;
615:           must use the gradient direction in this case */
616:         VecCopy(tao->gradient, tao->stepdirection);
617:         *stepType = BNK_GRADIENT;
618:       } else {
619:         /* Attempt to use the BFGS direction */
620:         MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

622:         /* Check for success (descent direction)
623:           NOTE: Negative gdx here means not a descent direction because
624:           the fall-back step is missing a negative sign. */
625:         VecDot(tao->gradient, tao->stepdirection, &gdx);
626:         if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
627:           /* The BFGS direction is not a descent direction or produced a NaN.
628:             We can assert bfgsUpdates > 1 in this case because
629:             the first solve produces the scaled gradient direction,
630:             which is guaranteed to be descent */

632:           /* Use steepest descent direction (scaled) */
633:           MatLMVMReset(bnk->M, PETSC_FALSE);
634:           MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
635:           MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

637:           *stepType = BNK_SCALED_GRADIENT;
638:         } else {
639:           MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
640:           if (1 == bfgsUpdates) {
641:             /* The first BFGS direction is always the scaled gradient */
642:             *stepType = BNK_SCALED_GRADIENT;
643:           } else {
644:             *stepType = BNK_BFGS;
645:           }
646:         }
647:       }
648:       /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
649:       VecScale(tao->stepdirection, -1.0);
650:       TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
651:     } else {
652:       /* Computed Newton step is descent */
653:       switch (ksp_reason) {
654:       case KSP_DIVERGED_NANORINF:
655:       case KSP_DIVERGED_BREAKDOWN:
656:       case KSP_DIVERGED_INDEFINITE_MAT:
657:       case KSP_DIVERGED_INDEFINITE_PC:
658:       case KSP_CONVERGED_CG_NEG_CURVE:
659:         /* Matrix or preconditioner is indefinite; increase perturbation */
660:         if (bnk->pert <= 0.0) {
661:           PetscBool is_gltr;

663:           /* Initialize the perturbation */
664:           bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
665:           PetscObjectTypeCompare((PetscObject)(tao->ksp),KSPGLTR,&is_gltr);
666:           if (is_gltr) {
667:             KSPGLTRGetMinEig(tao->ksp, &e_min);
668:             bnk->pert = PetscMax(bnk->pert, -e_min);
669:           }
670:         } else {
671:           /* Increase the perturbation */
672:           bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
673:         }
674:         break;

676:       default:
677:         /* Newton step computation is good; decrease perturbation */
678:         bnk->pert = PetscMin(bnk->psfac * bnk->pert, bnk->pmsfac * bnk->gnorm);
679:         if (bnk->pert < bnk->pmin) {
680:           bnk->pert = 0.0;
681:         }
682:         break;
683:       }
684:       *stepType = BNK_NEWTON;
685:     }
686:     break;

688:   case BNK_BFGS:
689:     /* Check for success (descent direction) */
690:     VecDot(tao->stepdirection, tao->gradient, &gdx);
691:     if (gdx >= 0 || PetscIsInfOrNanReal(gdx)) {
692:       /* Step is not descent or solve was not successful
693:          Use steepest descent direction (scaled) */
694:       MatLMVMReset(bnk->M, PETSC_FALSE);
695:       MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
696:       MatSolve(bnk->M, tao->gradient, tao->stepdirection);
697:       VecScale(tao->stepdirection,-1.0);
698:       TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);
699:       *stepType = BNK_SCALED_GRADIENT;
700:     } else {
701:       *stepType = BNK_BFGS;
702:     }
703:     break;

705:   case BNK_SCALED_GRADIENT:
706:     break;

708:   default:
709:     break;
710:   }

712:   return 0;
713: }
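/* Illustrative note (not from the PETSc sources): the perturbation schedule used above, and again
   in TaoBNKPerformLineSearch(), can be summarized as

       initialize:  pert = min(imax, max(imin, imfac * ||g||))   (raised to -e_min for KSPGLTR)
       grow:        pert = min(pmax, max(pgfac * pert, pmgfac * ||g||))
       shrink:      pert = min(psfac * pert, pmsfac * ||g||),  with pert = 0 once pert < pmin,

   so the diagonal shift grows whenever the Newton system looks indefinite or yields a bad step,
   and decays back to zero once Newton steps behave well. */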

715: /*------------------------------------------------------------*/

717: /* Routine for performing a bound-projected Moré-Thuente line search.

719:   Includes fallbacks to BFGS, scaled gradient, and unscaled gradient steps if the
720:   Newton step does not produce a valid step length.
721: */

723: PetscErrorCode TaoBNKPerformLineSearch(Tao tao, PetscInt *stepType, PetscReal *steplen, TaoLineSearchConvergedReason *reason)
724: {
725:   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
726:   TaoLineSearchConvergedReason ls_reason;
727:   PetscReal                    e_min, gdx;
728:   PetscInt                     bfgsUpdates;

730:   /* Perform the linesearch */
731:   TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);
732:   TaoAddLineSearchCounts(tao);

734:   while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && *stepType != BNK_SCALED_GRADIENT && *stepType != BNK_GRADIENT) {
735:     /* Linesearch failed, revert solution */
736:     bnk->f = bnk->fold;
737:     VecCopy(bnk->Xold, tao->solution);
738:     VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient);

740:     switch(*stepType) {
741:     case BNK_NEWTON:
742:       /* Failed to obtain acceptable iterate with Newton step
743:          Update the perturbation for next time */
744:       if (bnk->pert <= 0.0) {
745:         PetscBool is_gltr;

747:         /* Initialize the perturbation */
748:         bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
749:         PetscObjectTypeCompare((PetscObject)(tao->ksp),KSPGLTR,&is_gltr);
750:         if (is_gltr) {
751:           KSPGLTRGetMinEig(tao->ksp,&e_min);
752:           bnk->pert = PetscMax(bnk->pert, -e_min);
753:         }
754:       } else {
755:         /* Increase the perturbation */
756:         bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
757:       }

759:       if (!bnk->M) {
760:         /* We don't have the BFGS matrix available and kept updated;
761:            must use the gradient direction in this case */
762:         VecCopy(bnk->unprojected_gradient, tao->stepdirection);
763:         *stepType = BNK_GRADIENT;
764:       } else {
765:         /* Attempt to use the BFGS direction */
766:         MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);
767:         /* Check for success (descent direction)
768:            NOTE: Negative gdx means not a descent direction because the step here is missing a negative sign. */
769:         VecDot(tao->gradient, tao->stepdirection, &gdx);
770:         if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
771:           /* The BFGS direction is not a descent direction or produced a NaN.
772:              We can assert bfgsUpdates > 1 in this case
773:              Use steepest descent direction (scaled) */
774:           MatLMVMReset(bnk->M, PETSC_FALSE);
775:           MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
776:           MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

778:           bfgsUpdates = 1;
779:           *stepType = BNK_SCALED_GRADIENT;
780:         } else {
781:           MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);
782:           if (1 == bfgsUpdates) {
783:             /* The first BFGS direction is always the scaled gradient */
784:             *stepType = BNK_SCALED_GRADIENT;
785:           } else {
786:             *stepType = BNK_BFGS;
787:           }
788:         }
789:       }
790:       break;

792:     case BNK_BFGS:
793:       /* Can only enter here if the BFGS preconditioner exists.
794:          Failed to obtain acceptable iterate with BFGS step
795:          Attempt to use the scaled gradient direction */
796:       MatLMVMReset(bnk->M, PETSC_FALSE);
797:       MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);
798:       MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);

800:       bfgsUpdates = 1;
801:       *stepType = BNK_SCALED_GRADIENT;
802:       break;
803:     }
804:     /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
805:     VecScale(tao->stepdirection, -1.0);
806:     TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);

808:     /* Perform one last line search with the fall-back step */
809:     TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);
810:     TaoAddLineSearchCounts(tao);
811:   }
812:   *reason = ls_reason;
813:   return 0;
814: }

816: /*------------------------------------------------------------*/

818: /* Routine for updating the trust radius.

820:   This routine features three different update methods:
821:   1) Line-search step length based
822:   2) Predicted decrease on the CG quadratic model
823:   3) Interpolation
824: */

826: PetscErrorCode TaoBNKUpdateTrustRadius(Tao tao, PetscReal prered, PetscReal actred, PetscInt updateType, PetscInt stepType, PetscBool *accept)
827: {
828:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;

830:   PetscReal      step, kappa;
831:   PetscReal      gdx, tau_1, tau_2, tau_min, tau_max;

833:   /* Update trust region radius */
834:   *accept = PETSC_FALSE;
835:   switch(updateType) {
836:   case BNK_UPDATE_STEP:
837:     *accept = PETSC_TRUE; /* always accept here because line search succeeded */
838:     if (stepType == BNK_NEWTON) {
839:       TaoLineSearchGetStepLength(tao->linesearch, &step);
840:       if (step < bnk->nu1) {
841:         /* Very bad step taken; reduce radius */
842:         tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
843:       } else if (step < bnk->nu2) {
844:         /* Reasonably bad step taken; reduce radius */
845:         tao->trust = bnk->omega2 * PetscMin(bnk->dnorm, tao->trust);
846:       } else if (step < bnk->nu3) {
847:         /*  Reasonable step was taken; adjust the radius with omega3 (left unchanged when omega3 == 1) */
848:         if (bnk->omega3 < 1.0) {
849:           tao->trust = bnk->omega3 * PetscMin(bnk->dnorm, tao->trust);
850:         } else if (bnk->omega3 > 1.0) {
851:           tao->trust = PetscMax(bnk->omega3 * bnk->dnorm, tao->trust);
852:         }
853:       } else if (step < bnk->nu4) {
854:         /*  Full step taken; increase the radius */
855:         tao->trust = PetscMax(bnk->omega4 * bnk->dnorm, tao->trust);
856:       } else {
857:         /*  More than full step taken; increase the radius */
858:         tao->trust = PetscMax(bnk->omega5 * bnk->dnorm, tao->trust);
859:       }
860:     } else {
861:       /*  Newton step was not good; reduce the radius */
862:       tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
863:     }
864:     break;

866:   case BNK_UPDATE_REDUCTION:
867:     if (stepType == BNK_NEWTON) {
868:       if ((prered < 0.0) || PetscIsInfOrNanReal(prered)) {
869:         /* The predicted reduction has the wrong sign.  This cannot
870:            happen in infinite precision arithmetic.  Step should
871:            be rejected! */
872:         tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
873:       } else {
874:         if (PetscIsInfOrNanReal(actred)) {
875:           tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
876:         } else {
877:           if ((PetscAbsScalar(actred) <= PetscMax(1.0, PetscAbsScalar(bnk->f))*bnk->epsilon) && (PetscAbsScalar(prered) <= PetscMax(1.0, PetscAbsScalar(bnk->f))*bnk->epsilon)) {
878:             kappa = 1.0;
879:           } else {
880:             kappa = actred / prered;
881:           }
882:           /* Accept or reject the step and update radius */
883:           if (kappa < bnk->eta1) {
884:             /* Reject the step */
885:             tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
886:           } else {
887:             /* Accept the step */
888:             *accept = PETSC_TRUE;
889:             /* Update the trust region radius only if the computed step is at the trust radius boundary */
890:             if (bnk->dnorm == tao->trust) {
891:               if (kappa < bnk->eta2) {
892:                 /* Marginal bad step */
893:                 tao->trust = bnk->alpha2 * tao->trust;
894:               } else if (kappa < bnk->eta3) {
895:                 /* Reasonable step */
896:                 tao->trust = bnk->alpha3 * tao->trust;
897:               } else if (kappa < bnk->eta4) {
898:                 /* Good step */
899:                 tao->trust = bnk->alpha4 * tao->trust;
900:               } else {
901:                 /* Very good step */
902:                 tao->trust = bnk->alpha5 * tao->trust;
903:               }
904:             }
905:           }
906:         }
907:       }
908:     } else {
909:       /*  Newton step was not good; reduce the radius */
910:       tao->trust = bnk->alpha1 * PetscMin(bnk->dnorm, tao->trust);
911:     }
912:     break;

914:   default:
915:     if (stepType == BNK_NEWTON) {
916:       if (prered < 0.0) {
917:         /*  The predicted reduction has the wrong sign.  This cannot
918:             happen in infinite precision arithmetic.  Step should
919:             be rejected! */
920:         tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
921:       } else {
922:         if (PetscIsInfOrNanReal(actred)) {
923:           tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
924:         } else {
925:           if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
926:             kappa = 1.0;
927:           } else {
928:             kappa = actred / prered;
929:           }

931:           VecDot(tao->gradient, tao->stepdirection, &gdx);
932:           tau_1 = bnk->theta * gdx / (bnk->theta * gdx - (1.0 - bnk->theta) * prered + actred);
933:           tau_2 = bnk->theta * gdx / (bnk->theta * gdx + (1.0 + bnk->theta) * prered - actred);
934:           tau_min = PetscMin(tau_1, tau_2);
935:           tau_max = PetscMax(tau_1, tau_2);

937:           if (kappa >= 1.0 - bnk->mu1) {
938:             /*  Great agreement */
939:             *accept = PETSC_TRUE;
940:             if (tau_max < 1.0) {
941:               tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
942:             } else if (tau_max > bnk->gamma4) {
943:               tao->trust = PetscMax(tao->trust, bnk->gamma4 * bnk->dnorm);
944:             } else {
945:               tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
946:             }
947:           } else if (kappa >= 1.0 - bnk->mu2) {
948:             /*  Good agreement */
949:             *accept = PETSC_TRUE;
950:             if (tau_max < bnk->gamma2) {
951:               tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
952:             } else if (tau_max > bnk->gamma3) {
953:               tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
954:             } else if (tau_max < 1.0) {
955:               tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
956:             } else {
957:               tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
958:             }
959:           } else {
960:             /*  Not good agreement */
961:             if (tau_min > 1.0) {
962:               tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
963:             } else if (tau_max < bnk->gamma1) {
964:               tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
965:             } else if ((tau_min < bnk->gamma1) && (tau_max >= 1.0)) {
966:               tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
967:             } else if ((tau_1 >= bnk->gamma1) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1) || (tau_2 >= 1.0))) {
968:               tao->trust = tau_1 * PetscMin(tao->trust, bnk->dnorm);
969:             } else if ((tau_2 >= bnk->gamma1) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1) || (tau_2 >= 1.0))) {
970:               tao->trust = tau_2 * PetscMin(tao->trust, bnk->dnorm);
971:             } else {
972:               tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
973:             }
974:           }
975:         }
976:       }
977:     } else {
978:       /*  Newton step was not good; reduce the radius */
979:       tao->trust = bnk->gamma1 * PetscMin(bnk->dnorm, tao->trust);
980:     }
981:     break;
982:   }
983:   /* Make sure the radius does not violate min and max settings */
984:   tao->trust = PetscMin(tao->trust, bnk->max_radius);
985:   tao->trust = PetscMax(tao->trust, bnk->min_radius);
986:   return 0;
987: }
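/* Illustrative note (not from the PETSc sources): updateType corresponds to -tao_bnk_update_type,
   with "step" selecting the line-search based branch (BNK_UPDATE_STEP), "reduction" selecting the
   agreement-ratio branch (BNK_UPDATE_REDUCTION), and "interpolation" falling through to the
   default branch above.  In every case the radius is finally clipped to [min_radius, max_radius]. */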

989: /* ---------------------------------------------------------- */

991: PetscErrorCode TaoBNKAddStepCounts(Tao tao, PetscInt stepType)
992: {
993:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;

995:   switch (stepType) {
996:   case BNK_NEWTON:
997:     ++bnk->newt;
998:     break;
999:   case BNK_BFGS:
1000:     ++bnk->bfgs;
1001:     break;
1002:   case BNK_SCALED_GRADIENT:
1003:     ++bnk->sgrad;
1004:     break;
1005:   case BNK_GRADIENT:
1006:     ++bnk->grad;
1007:     break;
1008:   default:
1009:     break;
1010:   }
1011:   return 0;
1012: }

1014: /* ---------------------------------------------------------- */

1016: PetscErrorCode TaoSetUp_BNK(Tao tao)
1017: {
1018:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
1019:   PetscInt       i;

1021:   if (!tao->gradient) {
1022:     VecDuplicate(tao->solution,&tao->gradient);
1023:   }
1024:   if (!tao->stepdirection) {
1025:     VecDuplicate(tao->solution,&tao->stepdirection);
1026:   }
1027:   if (!bnk->W) {
1028:     VecDuplicate(tao->solution,&bnk->W);
1029:   }
1030:   if (!bnk->Xold) {
1031:     VecDuplicate(tao->solution,&bnk->Xold);
1032:   }
1033:   if (!bnk->Gold) {
1034:     VecDuplicate(tao->solution,&bnk->Gold);
1035:   }
1036:   if (!bnk->Xwork) {
1037:     VecDuplicate(tao->solution,&bnk->Xwork);
1038:   }
1039:   if (!bnk->Gwork) {
1040:     VecDuplicate(tao->solution,&bnk->Gwork);
1041:   }
1042:   if (!bnk->unprojected_gradient) {
1043:     VecDuplicate(tao->solution,&bnk->unprojected_gradient);
1044:   }
1045:   if (!bnk->unprojected_gradient_old) {
1046:     VecDuplicate(tao->solution,&bnk->unprojected_gradient_old);
1047:   }
1048:   if (!bnk->Diag_min) {
1049:     VecDuplicate(tao->solution,&bnk->Diag_min);
1050:   }
1051:   if (!bnk->Diag_max) {
1052:     VecDuplicate(tao->solution,&bnk->Diag_max);
1053:   }
1054:   if (bnk->max_cg_its > 0) {
1055:     /* Ensure that the important common vectors are shared between BNK and embedded BNCG */
1056:     bnk->bncg_ctx = (TAO_BNCG *)bnk->bncg->data;
1057:     PetscObjectReference((PetscObject)(bnk->unprojected_gradient_old));
1058:     VecDestroy(&bnk->bncg_ctx->unprojected_gradient_old);
1059:     bnk->bncg_ctx->unprojected_gradient_old = bnk->unprojected_gradient_old;
1060:     PetscObjectReference((PetscObject)(bnk->unprojected_gradient));
1061:     VecDestroy(&bnk->bncg_ctx->unprojected_gradient);
1062:     bnk->bncg_ctx->unprojected_gradient = bnk->unprojected_gradient;
1063:     PetscObjectReference((PetscObject)(bnk->Gold));
1064:     VecDestroy(&bnk->bncg_ctx->G_old);
1065:     bnk->bncg_ctx->G_old = bnk->Gold;
1066:     PetscObjectReference((PetscObject)(tao->gradient));
1067:     VecDestroy(&bnk->bncg->gradient);
1068:     bnk->bncg->gradient = tao->gradient;
1069:     PetscObjectReference((PetscObject)(tao->stepdirection));
1070:     VecDestroy(&bnk->bncg->stepdirection);
1071:     bnk->bncg->stepdirection = tao->stepdirection;
1072:     TaoSetSolution(bnk->bncg, tao->solution);
1073:     /* Copy over some settings from BNK into BNCG */
1074:     TaoSetMaximumIterations(bnk->bncg, bnk->max_cg_its);
1075:     TaoSetTolerances(bnk->bncg, tao->gatol, tao->grtol, tao->gttol);
1076:     TaoSetFunctionLowerBound(bnk->bncg, tao->fmin);
1077:     TaoSetConvergenceTest(bnk->bncg, tao->ops->convergencetest, tao->cnvP);
1078:     TaoSetObjective(bnk->bncg, tao->ops->computeobjective, tao->user_objP);
1079:     TaoSetGradient(bnk->bncg, NULL, tao->ops->computegradient, tao->user_gradP);
1080:     TaoSetObjectiveAndGradient(bnk->bncg, NULL, tao->ops->computeobjectiveandgradient, tao->user_objgradP);
1081:     PetscObjectCopyFortranFunctionPointers((PetscObject)tao, (PetscObject)(bnk->bncg));
1082:     for (i=0; i<tao->numbermonitors; ++i) {
1083:       TaoSetMonitor(bnk->bncg, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]);
1084:       PetscObjectReference((PetscObject)(tao->monitorcontext[i]));
1085:     }
1086:   }
1087:   bnk->X_inactive = NULL;
1088:   bnk->G_inactive = NULL;
1089:   bnk->inactive_work = NULL;
1090:   bnk->active_work = NULL;
1091:   bnk->inactive_idx = NULL;
1092:   bnk->active_idx = NULL;
1093:   bnk->active_lower = NULL;
1094:   bnk->active_upper = NULL;
1095:   bnk->active_fixed = NULL;
1096:   bnk->M = NULL;
1097:   bnk->H_inactive = NULL;
1098:   bnk->Hpre_inactive = NULL;
1099:   return 0;
1100: }

1102: /*------------------------------------------------------------*/

1104: PetscErrorCode TaoDestroy_BNK(Tao tao)
1105: {
1106:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;

1108:   if (tao->setupcalled) {
1109:     VecDestroy(&bnk->W);
1110:     VecDestroy(&bnk->Xold);
1111:     VecDestroy(&bnk->Gold);
1112:     VecDestroy(&bnk->Xwork);
1113:     VecDestroy(&bnk->Gwork);
1114:     VecDestroy(&bnk->unprojected_gradient);
1115:     VecDestroy(&bnk->unprojected_gradient_old);
1116:     VecDestroy(&bnk->Diag_min);
1117:     VecDestroy(&bnk->Diag_max);
1118:   }
1119:   ISDestroy(&bnk->active_lower);
1120:   ISDestroy(&bnk->active_upper);
1121:   ISDestroy(&bnk->active_fixed);
1122:   ISDestroy(&bnk->active_idx);
1123:   ISDestroy(&bnk->inactive_idx);
1124:   MatDestroy(&bnk->Hpre_inactive);
1125:   MatDestroy(&bnk->H_inactive);
1126:   TaoDestroy(&bnk->bncg);
1127:   PetscFree(tao->data);
1128:   return 0;
1129: }

1131: /*------------------------------------------------------------*/

1133: PetscErrorCode TaoSetFromOptions_BNK(PetscOptionItems *PetscOptionsObject,Tao tao)
1134: {
1135:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;

1137:   PetscOptionsHead(PetscOptionsObject,"Newton-Krylov method for bound constrained optimization");
1138:   PetscOptionsEList("-tao_bnk_init_type", "radius initialization type", "", BNK_INIT, BNK_INIT_TYPES, BNK_INIT[bnk->init_type], &bnk->init_type, NULL);
1139:   PetscOptionsEList("-tao_bnk_update_type", "radius update type", "", BNK_UPDATE, BNK_UPDATE_TYPES, BNK_UPDATE[bnk->update_type], &bnk->update_type, NULL);
1140:   PetscOptionsEList("-tao_bnk_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, NULL);
1141:   PetscOptionsReal("-tao_bnk_sval", "(developer) Hessian perturbation starting value", "", bnk->sval, &bnk->sval,NULL);
1142:   PetscOptionsReal("-tao_bnk_imin", "(developer) minimum initial Hessian perturbation", "", bnk->imin, &bnk->imin,NULL);
1143:   PetscOptionsReal("-tao_bnk_imax", "(developer) maximum initial Hessian perturbation", "", bnk->imax, &bnk->imax,NULL);
1144:   PetscOptionsReal("-tao_bnk_imfac", "(developer) initial merit factor for Hessian perturbation", "", bnk->imfac, &bnk->imfac,NULL);
1145:   PetscOptionsReal("-tao_bnk_pmin", "(developer) minimum Hessian perturbation", "", bnk->pmin, &bnk->pmin,NULL);
1146:   PetscOptionsReal("-tao_bnk_pmax", "(developer) maximum Hessian perturbation", "", bnk->pmax, &bnk->pmax,NULL);
1147:   PetscOptionsReal("-tao_bnk_pgfac", "(developer) Hessian perturbation growth factor", "", bnk->pgfac, &bnk->pgfac,NULL);
1148:   PetscOptionsReal("-tao_bnk_psfac", "(developer) Hessian perturbation shrink factor", "", bnk->psfac, &bnk->psfac,NULL);
1149:   PetscOptionsReal("-tao_bnk_pmgfac", "(developer) merit growth factor for Hessian perturbation", "", bnk->pmgfac, &bnk->pmgfac,NULL);
1150:   PetscOptionsReal("-tao_bnk_pmsfac", "(developer) merit shrink factor for Hessian perturbation", "", bnk->pmsfac, &bnk->pmsfac,NULL);
1151:   PetscOptionsReal("-tao_bnk_eta1", "(developer) threshold for rejecting step (-tao_bnk_update_type reduction)", "", bnk->eta1, &bnk->eta1,NULL);
1152:   PetscOptionsReal("-tao_bnk_eta2", "(developer) threshold for accepting marginal step (-tao_bnk_update_type reduction)", "", bnk->eta2, &bnk->eta2,NULL);
1153:   PetscOptionsReal("-tao_bnk_eta3", "(developer) threshold for accepting reasonable step (-tao_bnk_update_type reduction)", "", bnk->eta3, &bnk->eta3,NULL);
1154:   PetscOptionsReal("-tao_bnk_eta4", "(developer) threshold for accepting good step (-tao_bnk_update_type reduction)", "", bnk->eta4, &bnk->eta4,NULL);
1155:   PetscOptionsReal("-tao_bnk_alpha1", "(developer) radius reduction factor for rejected step (-tao_bnk_update_type reduction)", "", bnk->alpha1, &bnk->alpha1,NULL);
1156:   PetscOptionsReal("-tao_bnk_alpha2", "(developer) radius reduction factor for marginally accepted bad step (-tao_bnk_update_type reduction)", "", bnk->alpha2, &bnk->alpha2,NULL);
1157:   PetscOptionsReal("-tao_bnk_alpha3", "(developer) radius increase factor for reasonable accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha3, &bnk->alpha3,NULL);
1158:   PetscOptionsReal("-tao_bnk_alpha4", "(developer) radius increase factor for good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha4, &bnk->alpha4,NULL);
1159:   PetscOptionsReal("-tao_bnk_alpha5", "(developer) radius increase factor for very good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha5, &bnk->alpha5,NULL);
1160:   PetscOptionsReal("-tao_bnk_nu1", "(developer) threshold for small line-search step length (-tao_bnk_update_type step)", "", bnk->nu1, &bnk->nu1,NULL);
1161:   PetscOptionsReal("-tao_bnk_nu2", "(developer) threshold for reasonable line-search step length (-tao_bnk_update_type step)", "", bnk->nu2, &bnk->nu2,NULL);
1162:   PetscOptionsReal("-tao_bnk_nu3", "(developer) threshold for large line-search step length (-tao_bnk_update_type step)", "", bnk->nu3, &bnk->nu3,NULL);
1163:   PetscOptionsReal("-tao_bnk_nu4", "(developer) threshold for very large line-search step length (-tao_bnk_update_type step)", "", bnk->nu4, &bnk->nu4,NULL);
1164:   PetscOptionsReal("-tao_bnk_omega1", "(developer) radius reduction factor for very small line-search step length (-tao_bnk_update_type step)", "", bnk->omega1, &bnk->omega1,NULL);
1165:   PetscOptionsReal("-tao_bnk_omega2", "(developer) radius reduction factor for small line-search step length (-tao_bnk_update_type step)", "", bnk->omega2, &bnk->omega2,NULL);
1166:   PetscOptionsReal("-tao_bnk_omega3", "(developer) radius factor for decent line-search step length (-tao_bnk_update_type step)", "", bnk->omega3, &bnk->omega3,NULL);
1167:   PetscOptionsReal("-tao_bnk_omega4", "(developer) radius increase factor for large line-search step length (-tao_bnk_update_type step)", "", bnk->omega4, &bnk->omega4,NULL);
1168:   PetscOptionsReal("-tao_bnk_omega5", "(developer) radius increase factor for very large line-search step length (-tao_bnk_update_type step)", "", bnk->omega5, &bnk->omega5,NULL);
1169:   PetscOptionsReal("-tao_bnk_mu1_i", "(developer) threshold for accepting very good step (-tao_bnk_init_type interpolation)", "", bnk->mu1_i, &bnk->mu1_i,NULL);
1170:   PetscOptionsReal("-tao_bnk_mu2_i", "(developer) threshold for accepting good step (-tao_bnk_init_type interpolation)", "", bnk->mu2_i, &bnk->mu2_i,NULL);
1171:   PetscOptionsReal("-tao_bnk_gamma1_i", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma1_i, &bnk->gamma1_i,NULL);
1172:   PetscOptionsReal("-tao_bnk_gamma2_i", "(developer) radius reduction factor for rejected bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma2_i, &bnk->gamma2_i,NULL);
1173:   PetscOptionsReal("-tao_bnk_gamma3_i", "(developer) radius increase factor for accepted good step (-tao_bnk_init_type interpolation)", "", bnk->gamma3_i, &bnk->gamma3_i,NULL);
1174:   PetscOptionsReal("-tao_bnk_gamma4_i", "(developer) radius increase factor for accepted very good step (-tao_bnk_init_type interpolation)", "", bnk->gamma4_i, &bnk->gamma4_i,NULL);
1175:   PetscOptionsReal("-tao_bnk_theta_i", "(developer) trust region interpolation factor (-tao_bnk_init_type interpolation)", "", bnk->theta_i, &bnk->theta_i,NULL);
1176:   PetscOptionsReal("-tao_bnk_mu1", "(developer) threshold for accepting very good step (-tao_bnk_update_type interpolation)", "", bnk->mu1, &bnk->mu1,NULL);
1177:   PetscOptionsReal("-tao_bnk_mu2", "(developer) threshold for accepting good step (-tao_bnk_update_type interpolation)", "", bnk->mu2, &bnk->mu2,NULL);
1178:   PetscOptionsReal("-tao_bnk_gamma1", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma1, &bnk->gamma1,NULL);
1179:   PetscOptionsReal("-tao_bnk_gamma2", "(developer) radius reduction factor for rejected bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma2, &bnk->gamma2,NULL);
1180:   PetscOptionsReal("-tao_bnk_gamma3", "(developer) radius increase factor for accepted good step (-tao_bnk_update_type interpolation)", "", bnk->gamma3, &bnk->gamma3,NULL);
1181:   PetscOptionsReal("-tao_bnk_gamma4", "(developer) radius increase factor for accepted very good step (-tao_bnk_update_type interpolation)", "", bnk->gamma4, &bnk->gamma4,NULL);
1182:   PetscOptionsReal("-tao_bnk_theta", "(developer) trust region interpolation factor (-tao_bnk_update_type interpolation)", "", bnk->theta, &bnk->theta,NULL);
1183:   PetscOptionsReal("-tao_bnk_min_radius", "(developer) lower bound on initial radius", "", bnk->min_radius, &bnk->min_radius,NULL);
1184:   PetscOptionsReal("-tao_bnk_max_radius", "(developer) upper bound on radius", "", bnk->max_radius, &bnk->max_radius,NULL);
1185:   PetscOptionsReal("-tao_bnk_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon,NULL);
1186:   PetscOptionsReal("-tao_bnk_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol,NULL);
1187:   PetscOptionsReal("-tao_bnk_as_step", "(developer) step length used when estimating actively bounded variables", "", bnk->as_step, &bnk->as_step,NULL);
1188:   PetscOptionsInt("-tao_bnk_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its,NULL);
1189:   PetscOptionsTail();

1191:   TaoSetOptionsPrefix(bnk->bncg,((PetscObject)(tao))->prefix);
1192:   TaoAppendOptionsPrefix(bnk->bncg,"tao_bnk_cg_");
1193:   TaoSetFromOptions(bnk->bncg);

1195:   KSPSetOptionsPrefix(tao->ksp,((PetscObject)(tao))->prefix);
1196:   KSPAppendOptionsPrefix(tao->ksp,"tao_bnk_");
1197:   KSPSetFromOptions(tao->ksp);
1198:   return 0;
1199: }
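/* Illustrative example (assembled from the option registrations above and the prefixes set on
   tao->ksp and bnk->bncg; the values are arbitrary): a BNK-based solve might be configured at
   runtime with

     -tao_bnk_init_type interpolation  -tao_bnk_update_type reduction  -tao_bnk_as_type bertsekas
     -tao_bnk_max_cg_its 5             -tao_bnk_ksp_type gltr          -tao_bnk_pc_type lmvm

   where the tao_bnk_ksp_/tao_bnk_pc_ options reach the inner KSP and its preconditioner, and
   options prefixed with tao_bnk_cg_ reach the embedded BNCG solver. */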

1201: /*------------------------------------------------------------*/

1203: PetscErrorCode TaoView_BNK(Tao tao, PetscViewer viewer)
1204: {
1205:   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
1206:   PetscInt       nrejects;
1207:   PetscBool      isascii;

1209:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
1210:   if (isascii) {
1211:     PetscViewerASCIIPushTab(viewer);
1212:     if (bnk->M) {
1213:       MatLMVMGetRejectCount(bnk->M,&nrejects);
1214:       PetscViewerASCIIPrintf(viewer, "Rejected BFGS updates: %D\n",nrejects);
1215:     }
1216:     PetscViewerASCIIPrintf(viewer, "CG steps: %D\n", bnk->tot_cg_its);
1217:     PetscViewerASCIIPrintf(viewer, "Newton steps: %D\n", bnk->newt);
1218:     if (bnk->M) {
1219:       PetscViewerASCIIPrintf(viewer, "BFGS steps: %D\n", bnk->bfgs);
1220:     }
1221:     PetscViewerASCIIPrintf(viewer, "Scaled gradient steps: %D\n", bnk->sgrad);
1222:     PetscViewerASCIIPrintf(viewer, "Gradient steps: %D\n", bnk->grad);
1223:     PetscViewerASCIIPrintf(viewer, "KSP termination reasons:\n");
1224:     PetscViewerASCIIPrintf(viewer, "  atol: %D\n", bnk->ksp_atol);
1225:     PetscViewerASCIIPrintf(viewer, "  rtol: %D\n", bnk->ksp_rtol);
1226:     PetscViewerASCIIPrintf(viewer, "  ctol: %D\n", bnk->ksp_ctol);
1227:     PetscViewerASCIIPrintf(viewer, "  negc: %D\n", bnk->ksp_negc);
1228:     PetscViewerASCIIPrintf(viewer, "  dtol: %D\n", bnk->ksp_dtol);
1229:     PetscViewerASCIIPrintf(viewer, "  iter: %D\n", bnk->ksp_iter);
1230:     PetscViewerASCIIPrintf(viewer, "  othr: %D\n", bnk->ksp_othr);
1231:     PetscViewerASCIIPopTab(viewer);
1232:   }
1233:   return 0;
1234: }

1236: /* ---------------------------------------------------------- */

1238: /*MC
1239:   TAOBNK - Shared base type for bounded Newton-Krylov algorithms.
1240:   At each iteration, the BNK methods solve the symmetric system of equations
1241:               Hk dk = -gk
1242:   for the free variables only to obtain the step direction dk. The step can then
1243:   be globalized through trust-region methods, a line search, or a heuristic
1244:   mixture of both (a brief usage sketch follows this manual page).

1246:     Options Database Keys:
1247: + -tao_bnk_max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
1248: . -tao_bnk_init_type - trust radius initialization method ("constant", "direction", "interpolation")
1249: . -tao_bnk_update_type - trust radius update method ("step", "reduction", "interpolation")
1250: . -tao_bnk_as_type - active-set estimation method ("none", "bertsekas")
1251: . -tao_bnk_as_tol - (developer) initial tolerance used in estimating bounded active variables (-as_type bertsekas)
1252: . -tao_bnk_as_step - (developer) trial step length used in estimating bounded active variables (-as_type bertsekas)
1253: . -tao_bnk_sval - (developer) Hessian perturbation starting value
1254: . -tao_bnk_imin - (developer) minimum initial Hessian perturbation
1255: . -tao_bnk_imax - (developer) maximum initial Hessian perturbation
1256: . -tao_bnk_pmin - (developer) minimum Hessian perturbation
1257: . -tao_bnk_pmax - (developer) maximum Hessian perturbation
1258: . -tao_bnk_pgfac - (developer) Hessian perturbation growth factor
1259: . -tao_bnk_psfac - (developer) Hessian perturbation shrink factor
1260: . -tao_bnk_imfac - (developer) initial merit factor for Hessian perturbation
1261: . -tao_bnk_pmgfac - (developer) merit growth factor for Hessian perturbation
1262: . -tao_bnk_pmsfac - (developer) merit shrink factor for Hessian perturbation
1263: . -tao_bnk_eta1 - (developer) threshold for rejecting step (-update_type reduction)
1264: . -tao_bnk_eta2 - (developer) threshold for accepting marginal step (-update_type reduction)
1265: . -tao_bnk_eta3 - (developer) threshold for accepting reasonable step (-update_type reduction)
1266: . -tao_bnk_eta4 - (developer) threshold for accepting good step (-update_type reduction)
1267: . -tao_bnk_alpha1 - (developer) radius reduction factor for rejected step (-update_type reduction)
1268: . -tao_bnk_alpha2 - (developer) radius reduction factor for marginally accepted bad step (-update_type reduction)
1269: . -tao_bnk_alpha3 - (developer) radius increase factor for reasonable accepted step (-update_type reduction)
1270: . -tao_bnk_alpha4 - (developer) radius increase factor for good accepted step (-update_type reduction)
1271: . -tao_bnk_alpha5 - (developer) radius increase factor for very good accepted step (-update_type reduction)
1272: . -tao_bnk_epsilon - (developer) tolerance for small pred/actual ratios that trigger automatic step acceptance (-update_type reduction)
1273: . -tao_bnk_mu1 - (developer) threshold for accepting very good step (-update_type interpolation)
1274: . -tao_bnk_mu2 - (developer) threshold for accepting good step (-update_type interpolation)
1275: . -tao_bnk_gamma1 - (developer) radius reduction factor for rejected very bad step (-update_type interpolation)
1276: . -tao_bnk_gamma2 - (developer) radius reduction factor for rejected bad step (-update_type interpolation)
1277: . -tao_bnk_gamma3 - (developer) radius increase factor for accepted good step (-update_type interpolation)
1278: . -tao_bnk_gamma4 - (developer) radius increase factor for accepted very good step (-update_type interpolation)
1279: . -tao_bnk_theta - (developer) trust region interpolation factor (-update_type interpolation)
1280: . -tao_bnk_nu1 - (developer) threshold for small line-search step length (-update_type step)
1281: . -tao_bnk_nu2 - (developer) threshold for reasonable line-search step length (-update_type step)
1282: . -tao_bnk_nu3 - (developer) threshold for large line-search step length (-update_type step)
1283: . -tao_bnk_nu4 - (developer) threshold for very large line-search step length (-update_type step)
1284: . -tao_bnk_omega1 - (developer) radius reduction factor for very small line-search step length (-update_type step)
1285: . -tao_bnk_omega2 - (developer) radius reduction factor for small line-search step length (-update_type step)
1286: . -tao_bnk_omega3 - (developer) radius factor for decent line-search step length (-update_type step)
1287: . -tao_bnk_omega4 - (developer) radius increase factor for large line-search step length (-update_type step)
1288: . -tao_bnk_omega5 - (developer) radius increase factor for very large line-search step length (-update_type step)
1289: . -tao_bnk_mu1_i - (developer) threshold for accepting very good step (-init_type interpolation)
1290: . -tao_bnk_mu2_i - (developer) threshold for accepting good step (-init_type interpolation)
1291: . -tao_bnk_gamma1_i - (developer) radius reduction factor for rejected very bad step (-init_type interpolation)
1292: . -tao_bnk_gamma2_i - (developer) radius reduction factor for rejected bad step (-init_type interpolation)
1293: . -tao_bnk_gamma3_i - (developer) radius increase factor for accepted good step (-init_type interpolation)
1294: . -tao_bnk_gamma4_i - (developer) radius increase factor for accepted very good step (-init_type interpolation)
1295: - -tao_bnk_theta_i - (developer) trust region interpolation factor (-init_type interpolation)

1297:   Level: beginner
1298: M*/
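
/*
  A minimal usage sketch (illustrative only, not part of this source): TAOBNK is a shared base
  type, so an application selects one of the derived bound-constrained solvers built on it
  (TAOBNLS, TAOBNTR, or TAOBNTL) and tunes the behavior implemented in this file through the
  -tao_bnk_* options listed above.

    Tao tao;
    TaoCreate(PETSC_COMM_WORLD, &tao);
    TaoSetType(tao, TAOBNTR);            // any BNK-derived type; also selectable with -tao_type bntr
    TaoSetVariableBounds(tao, xl, xu);   // xl, xu are user-created bound vectors (hypothetical names)
    // ... set the solution vector plus objective/gradient and Hessian callbacks here ...
    TaoSetFromOptions(tao);              // picks up -tao_bnk_* options, for example:
    //   -tao_bnk_max_cg_its 5 -tao_bnk_init_type interpolation -tao_bnk_update_type reduction
    //   -tao_bnk_ksp_type stcg -tao_bnk_pc_type lmvm   (inner KSP/PC via the tao_bnk_ prefix)
    TaoSolve(tao);
    TaoDestroy(&tao);
*/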

1300: PetscErrorCode TaoCreate_BNK(Tao tao)
1301: {
1302:   TAO_BNK        *bnk;
1303:   const char     *morethuente_type = TAOLINESEARCHMT;
1304:   PC             pc;

1306:   PetscNewLog(tao,&bnk);

1308:   tao->ops->setup = TaoSetUp_BNK;
1309:   tao->ops->view = TaoView_BNK;
1310:   tao->ops->setfromoptions = TaoSetFromOptions_BNK;
1311:   tao->ops->destroy = TaoDestroy_BNK;

1313:   /*  Override default settings (unless already changed) */
1314:   if (!tao->max_it_changed) tao->max_it = 50;
1315:   if (!tao->trust0_changed) tao->trust0 = 100.0;

1317:   tao->data = (void*)bnk;

1319:   /*  Hessian shifting parameters */
1320:   bnk->computehessian = TaoBNKComputeHessian;
1321:   bnk->computestep = TaoBNKComputeStep;

1323:   bnk->sval   = 0.0;
1324:   bnk->imin   = 1.0e-4;
1325:   bnk->imax   = 1.0e+2;
1326:   bnk->imfac  = 1.0e-1;

1328:   bnk->pmin   = 1.0e-12;
1329:   bnk->pmax   = 1.0e+2;
1330:   bnk->pgfac  = 1.0e+1;
1331:   bnk->psfac  = 4.0e-1;
1332:   bnk->pmgfac = 1.0e-1;
1333:   bnk->pmsfac = 1.0e-1;

1335:   /*  Default values for trust-region radius update based on steplength */
1336:   bnk->nu1 = 0.25;
1337:   bnk->nu2 = 0.50;
1338:   bnk->nu3 = 1.00;
1339:   bnk->nu4 = 1.25;

1341:   bnk->omega1 = 0.25;
1342:   bnk->omega2 = 0.50;
1343:   bnk->omega3 = 1.00;
1344:   bnk->omega4 = 2.00;
1345:   bnk->omega5 = 4.00;

1347:   /*  Default values for trust-region radius update based on reduction */
1348:   bnk->eta1 = 1.0e-4;
1349:   bnk->eta2 = 0.25;
1350:   bnk->eta3 = 0.50;
1351:   bnk->eta4 = 0.90;

1353:   bnk->alpha1 = 0.25;
1354:   bnk->alpha2 = 0.50;
1355:   bnk->alpha3 = 1.00;
1356:   bnk->alpha4 = 2.00;
1357:   bnk->alpha5 = 4.00;

1359:   /*  Default values for trust-region radius update based on interpolation */
1360:   bnk->mu1 = 0.10;
1361:   bnk->mu2 = 0.50;

1363:   bnk->gamma1 = 0.25;
1364:   bnk->gamma2 = 0.50;
1365:   bnk->gamma3 = 2.00;
1366:   bnk->gamma4 = 4.00;

1368:   bnk->theta = 0.05;

1370:   /*  Default values for trust region initialization based on interpolation */
1371:   bnk->mu1_i = 0.35;
1372:   bnk->mu2_i = 0.50;

1374:   bnk->gamma1_i = 0.0625;
1375:   bnk->gamma2_i = 0.5;
1376:   bnk->gamma3_i = 2.0;
1377:   bnk->gamma4_i = 5.0;

1379:   bnk->theta_i = 0.25;

1381:   /*  Remaining parameters */
1382:   bnk->max_cg_its = 0;
1383:   bnk->min_radius = 1.0e-10;
1384:   bnk->max_radius = 1.0e10;
1385:   bnk->epsilon = PetscPowReal(PETSC_MACHINE_EPSILON, 2.0/3.0);
1386:   bnk->as_tol = 1.0e-3;
1387:   bnk->as_step = 1.0e-3;
1388:   bnk->dmin = 1.0e-6;
1389:   bnk->dmax = 1.0e6;

1391:   bnk->M           = NULL;
1392:   bnk->bfgs_pre    = NULL;
1393:   bnk->init_type   = BNK_INIT_INTERPOLATION;
1394:   bnk->update_type = BNK_UPDATE_REDUCTION;
1395:   bnk->as_type     = BNK_AS_BERTSEKAS;

1397:   /* Create the embedded BNCG solver */
1398:   TaoCreate(PetscObjectComm((PetscObject)tao), &bnk->bncg);
1399:   PetscObjectIncrementTabLevel((PetscObject)bnk->bncg, (PetscObject)tao, 1);
1400:   TaoSetType(bnk->bncg, TAOBNCG);

1402:   /* Create the line search */
1403:   TaoLineSearchCreate(((PetscObject)tao)->comm,&tao->linesearch);
1404:   PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);
1405:   TaoLineSearchSetType(tao->linesearch,morethuente_type);
1406:   TaoLineSearchUseTaoRoutines(tao->linesearch,tao);

1408:   /*  Set linear solver to default for symmetric matrices */
1409:   KSPCreate(((PetscObject)tao)->comm,&tao->ksp);
1410:   PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1);
1411:   KSPSetType(tao->ksp,KSPSTCG);
1412:   KSPGetPC(tao->ksp, &pc);
1413:   PCSetType(pc, PCLMVM);
1414:   return 0;
1415: }