Actual source code: taosolver.c

  1: #define TAO_DLL

  3: #include <petsc/private/taoimpl.h>

  5: PetscBool TaoRegisterAllCalled = PETSC_FALSE;
  6: PetscFunctionList TaoList = NULL;

  8: PetscClassId TAO_CLASSID;

 10: PetscLogEvent TAO_Solve;
 11: PetscLogEvent TAO_ObjectiveEval;
 12: PetscLogEvent TAO_GradientEval;
 13: PetscLogEvent TAO_ObjGradEval;
 14: PetscLogEvent TAO_HessianEval;
 15: PetscLogEvent TAO_JacobianEval;
 16: PetscLogEvent TAO_ConstraintsEval;

 18: const char *TaoSubSetTypes[] = {"subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",NULL};

 20: struct _n_TaoMonitorDrawCtx {
 21:   PetscViewer viewer;
 22:   PetscInt    howoften;  /* when > 0 uses iteration % howoften, when negative only final solution plotted */
 23: };

 25: /*@
 26:   TaoCreate - Creates a TAO solver

 28:   Collective

 30:   Input Parameter:
 31: . comm - MPI communicator

 33:   Output Parameter:
 34: . newtao - the new Tao context

 36:   Available methods include:
 37: +    nls - Newton's method with line search for unconstrained minimization
 38: .    ntr - Newton's method with trust region for unconstrained minimization
 39: .    ntl - Newton's method with trust region, line search for unconstrained minimization
 40: .    lmvm - Limited memory variable metric method for unconstrained minimization
 41: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
 42: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
 43: .    tron - Newton Trust Region method for bound constrained minimization
 44: .    gpcg - Gradient projection conjugate gradient method for quadratic bound constrained minimization
 45: .    blmvm - Limited memory variable metric method for bound constrained minimization
 46: .    lcl - Linearly constrained Lagrangian method for pde-constrained minimization
 47: -    pounders - Model-based algorithm for nonlinear least squares

 49:    Options Database Keys:
 50: .   -tao_type - select which method TAO should use

 52:    Level: beginner

 54: .seealso: TaoSolve(), TaoDestroy()
 55: @*/
 56: PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
 57: {
 59:   Tao            tao;

 63:   *newtao = NULL;

 65:   TaoInitializePackage();
 66:   TaoLineSearchInitializePackage();

 68:   PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);
 69:   tao->ops->computeobjective = NULL;
 70:   tao->ops->computeobjectiveandgradient = NULL;
 71:   tao->ops->computegradient = NULL;
 72:   tao->ops->computehessian = NULL;
 73:   tao->ops->computeresidual = NULL;
 74:   tao->ops->computeresidualjacobian = NULL;
 75:   tao->ops->computeconstraints = NULL;
 76:   tao->ops->computejacobian = NULL;
 77:   tao->ops->computejacobianequality = NULL;
 78:   tao->ops->computejacobianinequality = NULL;
 79:   tao->ops->computeequalityconstraints = NULL;
 80:   tao->ops->computeinequalityconstraints = NULL;
 81:   tao->ops->convergencetest = TaoDefaultConvergenceTest;
 82:   tao->ops->convergencedestroy = NULL;
 83:   tao->ops->computedual = NULL;
 84:   tao->ops->setup = NULL;
 85:   tao->ops->solve = NULL;
 86:   tao->ops->view = NULL;
 87:   tao->ops->setfromoptions = NULL;
 88:   tao->ops->destroy = NULL;

 90:   tao->solution=NULL;
 91:   tao->gradient=NULL;
 92:   tao->ls_res = NULL;
 93:   tao->ls_jac = NULL;
 94:   tao->constraints=NULL;
 95:   tao->constraints_equality=NULL;
 96:   tao->constraints_inequality=NULL;
 97:   tao->res_weights_v=NULL;
 98:   tao->res_weights_w=NULL;
 99:   tao->stepdirection=NULL;
100:   tao->niter=0;
101:   tao->ntotalits=0;
102:   tao->XL = NULL;
103:   tao->XU = NULL;
104:   tao->IL = NULL;
105:   tao->IU = NULL;
106:   tao->DI = NULL;
107:   tao->DE = NULL;
108:   tao->gradient_norm = NULL;
109:   tao->gradient_norm_tmp = NULL;
110:   tao->hessian = NULL;
111:   tao->hessian_pre = NULL;
112:   tao->jacobian = NULL;
113:   tao->jacobian_pre = NULL;
114:   tao->jacobian_state = NULL;
115:   tao->jacobian_state_pre = NULL;
116:   tao->jacobian_state_inv = NULL;
117:   tao->jacobian_design = NULL;
118:   tao->jacobian_design_pre = NULL;
119:   tao->jacobian_equality = NULL;
120:   tao->jacobian_equality_pre = NULL;
121:   tao->jacobian_inequality = NULL;
122:   tao->jacobian_inequality_pre = NULL;
123:   tao->state_is = NULL;
124:   tao->design_is = NULL;

126:   tao->max_it     = 10000;
127:   tao->max_funcs   = 10000;
128: #if defined(PETSC_USE_REAL_SINGLE)
129:   tao->gatol       = 1e-5;
130:   tao->grtol       = 1e-5;
131:   tao->crtol       = 1e-5;
132:   tao->catol       = 1e-5;
133: #else
134:   tao->gatol       = 1e-8;
135:   tao->grtol       = 1e-8;
136:   tao->crtol       = 1e-8;
137:   tao->catol       = 1e-8;
138: #endif
139:   tao->gttol       = 0.0;
140:   tao->steptol     = 0.0;
141:   tao->trust0      = PETSC_INFINITY;
142:   tao->fmin        = PETSC_NINFINITY;
143:   tao->hist_malloc = PETSC_FALSE;
144:   tao->hist_reset = PETSC_TRUE;
145:   tao->hist_max = 0;
146:   tao->hist_len = 0;
147:   tao->hist_obj = NULL;
148:   tao->hist_resid = NULL;
149:   tao->hist_cnorm = NULL;
150:   tao->hist_lits = NULL;

152:   tao->numbermonitors=0;
153:   tao->viewsolution=PETSC_FALSE;
154:   tao->viewhessian=PETSC_FALSE;
155:   tao->viewgradient=PETSC_FALSE;
156:   tao->viewjacobian=PETSC_FALSE;
157:   tao->viewconstraints = PETSC_FALSE;

159:   tao->bounded = PETSC_FALSE;
160:   tao->constrained = PETSC_FALSE;
161:   tao->eq_constrained = PETSC_FALSE;
162:   tao->ineq_constrained = PETSC_FALSE;
163:   tao->ineq_doublesided = PETSC_FALSE;

165:   tao->recycle = PETSC_FALSE;

167:   tao->header_printed = PETSC_FALSE;

 169:   /* These flags prevent algorithms from overriding user options */
170:   tao->max_it_changed   =PETSC_FALSE;
171:   tao->max_funcs_changed=PETSC_FALSE;
172:   tao->gatol_changed    =PETSC_FALSE;
173:   tao->grtol_changed    =PETSC_FALSE;
174:   tao->gttol_changed    =PETSC_FALSE;
175:   tao->steptol_changed  =PETSC_FALSE;
176:   tao->trust0_changed   =PETSC_FALSE;
177:   tao->fmin_changed     =PETSC_FALSE;
178:   tao->catol_changed    =PETSC_FALSE;
179:   tao->crtol_changed    =PETSC_FALSE;
180:   TaoResetStatistics(tao);
181:   *newtao = tao;
182:   return(0);
183: }
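
/*
   Illustrative usage sketch (not part of the original file): a minimal
   create / select-type / destroy cycle for a Tao object.  TAOLMVM is just one
   of the methods listed above; any of them can also be selected at run time
   with -tao_type.

     Tao            tao;
     PetscErrorCode ierr;

     ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr);
     ierr = TaoSetType(tao,TAOLMVM);CHKERRQ(ierr);
     ierr = TaoSetFromOptions(tao);CHKERRQ(ierr);
     ...  set the initial vector and callbacks, then call TaoSolve() ...
     ierr = TaoDestroy(&tao);CHKERRQ(ierr);
*/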

185: /*@
186:   TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u

188:   Collective on Tao

190:   Input Parameters:
191: . tao - the Tao context

193:   Notes:
194:   The user must set up the Tao with calls to TaoSetInitialVector(),
195:   TaoSetObjectiveRoutine(),
 196:  TaoSetGradientRoutine(), and (if using a second-order method) TaoSetHessianRoutine().

198:   You should call TaoGetConvergedReason() or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or
199:   why it failed.

201:   Level: beginner

203: .seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoGetConvergedReason()
204:  @*/
205: PetscErrorCode TaoSolve(Tao tao)
206: {
207:   PetscErrorCode   ierr;
208:   static PetscBool set = PETSC_FALSE;

212:   PetscCitationsRegister("@TechReport{tao-user-ref,\n"
213:                                 "title   = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
214:                                 "author  = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
215:                                 "Institution = {Argonne National Laboratory},\n"
216:                                 "Year   = 2014,\n"
217:                                 "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
218:                                 "url    = {https://www.mcs.anl.gov/research/projects/tao/}\n}\n",&set);
219:   tao->header_printed = PETSC_FALSE;
220:   TaoSetUp(tao);
221:   TaoResetStatistics(tao);
222:   if (tao->linesearch) {
223:     TaoLineSearchReset(tao->linesearch);
224:   }

226:   PetscLogEventBegin(TAO_Solve,tao,0,0,0);
227:   if (tao->ops->solve){ (*tao->ops->solve)(tao); }
228:   PetscLogEventEnd(TAO_Solve,tao,0,0,0);

230:   VecViewFromOptions(tao->solution,(PetscObject)tao,"-tao_view_solution");

232:   tao->ntotalits += tao->niter;
233:   TaoViewFromOptions(tao,NULL,"-tao_view");

235:   if (tao->printreason) {
236:     if (tao->reason > 0) {
237:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
238:     } else {
 239:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
240:     }
241:   }
242:   return(0);
243: }
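
/*
   Illustrative usage sketch of the setup sequence described in the Notes
   above.  FormObjective, FormGradient, the user context 'user', and the
   vector x are assumed to be provided by the application.

     Tao                tao;
     TaoConvergedReason reason;

     TaoCreate(PETSC_COMM_WORLD,&tao);
     TaoSetType(tao,TAOLMVM);
     TaoSetInitialVector(tao,x);
     TaoSetObjectiveRoutine(tao,FormObjective,&user);
     TaoSetGradientRoutine(tao,FormGradient,&user);
     TaoSetFromOptions(tao);
     TaoSolve(tao);
     TaoGetConvergedReason(tao,&reason);
     TaoDestroy(&tao);
*/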

245: /*@
246:   TaoSetUp - Sets up the internal data structures for the later use
247:   of a Tao solver

249:   Collective on tao

251:   Input Parameters:
252: . tao - the TAO context

254:   Notes:
255:   The user will not need to explicitly call TaoSetUp(), as it will
256:   automatically be called in TaoSolve().  However, if the user
257:   desires to call it explicitly, it should come after TaoCreate()
258:   and any TaoSetSomething() routines, but before TaoSolve().

260:   Level: advanced

262: .seealso: TaoCreate(), TaoSolve()
263: @*/
264: PetscErrorCode TaoSetUp(Tao tao)
265: {

270:   if (tao->setupcalled) return(0);

272:   if (!tao->solution) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector");
273:   if (tao->ops->setup) {
274:     (*tao->ops->setup)(tao);
275:   }
276:   tao->setupcalled = PETSC_TRUE;
277:   return(0);
278: }
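
/*
   Illustrative sketch of calling TaoSetUp() explicitly, in the order described
   in the Notes above (x is an application-provided vector); normally
   TaoSolve() performs this call automatically.

     TaoCreate(PETSC_COMM_WORLD,&tao);
     TaoSetInitialVector(tao,x);
     ... other TaoSetSomething() calls ...
     TaoSetUp(tao);
     TaoSolve(tao);
     TaoDestroy(&tao);
*/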

280: /*@C
281:   TaoDestroy - Destroys the TAO context that was created with
282:   TaoCreate()

284:   Collective on Tao

286:   Input Parameter:
287: . tao - the Tao context

289:   Level: beginner

291: .seealso: TaoCreate(), TaoSolve()
292: @*/
293: PetscErrorCode TaoDestroy(Tao *tao)
294: {

298:   if (!*tao) return(0);
300:   if (--((PetscObject)*tao)->refct > 0) {*tao = NULL;return(0);}

302:   if ((*tao)->ops->destroy) {
303:     (*((*tao))->ops->destroy)(*tao);
304:   }
305:   KSPDestroy(&(*tao)->ksp);
306:   TaoLineSearchDestroy(&(*tao)->linesearch);

308:   if ((*tao)->ops->convergencedestroy) {
309:     (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);
310:     if ((*tao)->jacobian_state_inv) {
311:       MatDestroy(&(*tao)->jacobian_state_inv);
312:     }
313:   }
314:   VecDestroy(&(*tao)->solution);
315:   VecDestroy(&(*tao)->gradient);
316:   VecDestroy(&(*tao)->ls_res);

318:   if ((*tao)->gradient_norm) {
319:     PetscObjectDereference((PetscObject)(*tao)->gradient_norm);
320:     VecDestroy(&(*tao)->gradient_norm_tmp);
321:   }

323:   VecDestroy(&(*tao)->XL);
324:   VecDestroy(&(*tao)->XU);
325:   VecDestroy(&(*tao)->IL);
326:   VecDestroy(&(*tao)->IU);
327:   VecDestroy(&(*tao)->DE);
328:   VecDestroy(&(*tao)->DI);
329:   VecDestroy(&(*tao)->constraints_equality);
330:   VecDestroy(&(*tao)->constraints_inequality);
331:   VecDestroy(&(*tao)->stepdirection);
332:   MatDestroy(&(*tao)->hessian_pre);
333:   MatDestroy(&(*tao)->hessian);
334:   MatDestroy(&(*tao)->ls_jac);
335:   MatDestroy(&(*tao)->ls_jac_pre);
336:   MatDestroy(&(*tao)->jacobian_pre);
337:   MatDestroy(&(*tao)->jacobian);
338:   MatDestroy(&(*tao)->jacobian_state_pre);
339:   MatDestroy(&(*tao)->jacobian_state);
340:   MatDestroy(&(*tao)->jacobian_state_inv);
341:   MatDestroy(&(*tao)->jacobian_design);
342:   MatDestroy(&(*tao)->jacobian_equality);
343:   MatDestroy(&(*tao)->jacobian_equality_pre);
344:   MatDestroy(&(*tao)->jacobian_inequality);
345:   MatDestroy(&(*tao)->jacobian_inequality_pre);
346:   ISDestroy(&(*tao)->state_is);
347:   ISDestroy(&(*tao)->design_is);
348:   VecDestroy(&(*tao)->res_weights_v);
349:   TaoCancelMonitors(*tao);
350:   if ((*tao)->hist_malloc) {
351:     PetscFree4((*tao)->hist_obj,(*tao)->hist_resid,(*tao)->hist_cnorm,(*tao)->hist_lits);
352:   }
353:   if ((*tao)->res_weights_n) {
354:     PetscFree((*tao)->res_weights_rows);
355:     PetscFree((*tao)->res_weights_cols);
356:     PetscFree((*tao)->res_weights_w);
357:   }
358:   PetscHeaderDestroy(tao);
359:   return(0);
360: }

362: /*@
363:   TaoSetFromOptions - Sets various Tao parameters from user
364:   options.

366:   Collective on Tao

 368:   Input Parameter:
369: . tao - the Tao solver context

 371:   Options Database Keys:
372: + -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.)
373: . -tao_gatol <gatol> - absolute error tolerance for ||gradient||
374: . -tao_grtol <grtol> - relative error tolerance for ||gradient||
375: . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
376: . -tao_max_it <max> - sets maximum number of iterations
377: . -tao_max_funcs <max> - sets maximum number of function evaluations
378: . -tao_fmin <fmin> - stop if function value reaches fmin
379: . -tao_steptol <tol> - stop if trust region radius less than <tol>
380: . -tao_trust0 <t> - initial trust region radius
381: . -tao_monitor - prints function value and residual at each iteration
382: . -tao_smonitor - same as tao_monitor, but truncates very small values
383: . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
384: . -tao_view_solution - prints solution vector at each iteration
385: . -tao_view_ls_residual - prints least-squares residual vector at each iteration
386: . -tao_view_step - prints step direction vector at each iteration
387: . -tao_view_gradient - prints gradient vector at each iteration
388: . -tao_draw_solution - graphically view solution vector at each iteration
389: . -tao_draw_step - graphically view step vector at each iteration
390: . -tao_draw_gradient - graphically view gradient at each iteration
391: . -tao_fd_gradient - use gradient computed with finite differences
392: . -tao_fd_hessian - use hessian computed with finite differences
393: . -tao_mf_hessian - use matrix-free hessian computed with finite differences
394: . -tao_cancelmonitors - cancels all monitors (except those set with command line)
395: . -tao_view - prints information about the Tao after solving
396: - -tao_converged_reason - prints the reason TAO stopped iterating

398:   Notes:
399:   To see all options, run your program with the -help option or consult the
 400:   user's manual. TaoSetFromOptions() should be called after TaoCreate() but before TaoSolve().

402:   Level: beginner
403: @*/
404: PetscErrorCode TaoSetFromOptions(Tao tao)
405: {
407:   TaoType        default_type = TAOLMVM;
408:   char           type[256], monfilename[PETSC_MAX_PATH_LEN];
409:   PetscViewer    monviewer;
410:   PetscBool      flg;
411:   MPI_Comm       comm;

415:   PetscObjectGetComm((PetscObject)tao,&comm);

417:   /* So no warnings are given about unused options */
418:   PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);

420:   PetscObjectOptionsBegin((PetscObject)tao);
421:   {
422:     TaoRegisterAll();
423:     if (((PetscObject)tao)->type_name) {
424:       default_type = ((PetscObject)tao)->type_name;
425:     }
426:     /* Check for type from options */
427:     PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);
428:     if (flg) {
429:       TaoSetType(tao,type);
430:     } else if (!((PetscObject)tao)->type_name) {
431:       TaoSetType(tao,default_type);
432:     }

434:     PetscOptionsReal("-tao_catol","Stop if constraints violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);
435:     if (flg) tao->catol_changed=PETSC_TRUE;
 436:     PetscOptionsReal("-tao_crtol","Stop if relative constraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);
437:     if (flg) tao->crtol_changed=PETSC_TRUE;
438:     PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);
439:     if (flg) tao->gatol_changed=PETSC_TRUE;
440:     PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);
441:     if (flg) tao->grtol_changed=PETSC_TRUE;
442:     PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);
443:     if (flg) tao->gttol_changed=PETSC_TRUE;
444:     PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);
445:     if (flg) tao->max_it_changed=PETSC_TRUE;
446:     PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);
447:     if (flg) tao->max_funcs_changed=PETSC_TRUE;
448:     PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);
449:     if (flg) tao->fmin_changed=PETSC_TRUE;
450:     PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);
451:     if (flg) tao->steptol_changed=PETSC_TRUE;
452:     PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);
453:     if (flg) tao->trust0_changed=PETSC_TRUE;
454:     PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
455:     if (flg) {
456:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
457:       TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
458:     }

460:     PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);
461:     PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
462:     if (flg) {
463:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
464:       TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
465:     }

467:     PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
468:     if (flg) {
469:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
470:       TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
471:     }

473:     PetscOptionsString("-tao_view_residual","view least-squares residual vector after each evaluation","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
474:     if (flg) {
475:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
476:       TaoSetMonitor(tao,TaoResidualMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
477:     }

479:     PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
480:     if (flg) {
481:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
482:       TaoSetMonitor(tao,TaoMonitorDefault,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
483:     }

485:     PetscOptionsString("-tao_gmonitor","Use the convergence monitor with extra globalization info","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
486:     if (flg) {
487:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
488:       TaoSetMonitor(tao,TaoDefaultGMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
489:     }

491:     PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
492:     if (flg) {
493:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
494:       TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
495:     }

497:     PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,sizeof(monfilename),&flg);
498:     if (flg) {
499:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
500:       TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
501:     }


504:     flg = PETSC_FALSE;
505:     PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);
506:     if (flg) {TaoCancelMonitors(tao);}

508:     flg = PETSC_FALSE;
509:     PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);
510:     if (flg) {
511:       TaoMonitorDrawCtx drawctx;
512:       PetscInt          howoften = 1;
513:       TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
514:       TaoSetMonitor(tao,TaoDrawSolutionMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
515:     }

517:     flg = PETSC_FALSE;
518:     PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);
519:     if (flg) {
520:       TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);
521:     }

523:     flg = PETSC_FALSE;
524:     PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);
525:     if (flg) {
526:       TaoMonitorDrawCtx drawctx;
527:       PetscInt          howoften = 1;
528:       TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
529:       TaoSetMonitor(tao,TaoDrawGradientMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
530:     }
531:     flg = PETSC_FALSE;
532:     PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);
533:     if (flg) {
534:       TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);
535:     }
536:     flg = PETSC_FALSE;
537:     PetscOptionsBool("-tao_fd_hessian","compute hessian using finite differences","TaoDefaultComputeHessian",flg,&flg,NULL);
538:     if (flg) {
539:       Mat H;

541:       MatCreate(PetscObjectComm((PetscObject)tao),&H);
542:       MatSetType(H,MATAIJ);
543:       TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessian,NULL);
544:       MatDestroy(&H);
545:     }
546:     flg = PETSC_FALSE;
547:     PetscOptionsBool("-tao_mf_hessian","compute matrix-free hessian using finite differences","TaoDefaultComputeHessianMFFD",flg,&flg,NULL);
548:     if (flg) {
549:       Mat H;

551:       MatCreate(PetscObjectComm((PetscObject)tao),&H);
552:       TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessianMFFD,NULL);
553:       MatDestroy(&H);
554:     }
555:     flg = PETSC_FALSE;
556:     PetscOptionsBool("-tao_recycle_history","enable recycling/re-using information from the previous TaoSolve() call for some algorithms","TaoSetRecycleHistory",flg,&flg,NULL);
557:     if (flg) {
558:       TaoSetRecycleHistory(tao, PETSC_TRUE);
559:     }
560:     PetscOptionsEnum("-tao_subset_type","subset type","",TaoSubSetTypes,(PetscEnum)tao->subset_type,(PetscEnum*)&tao->subset_type,NULL);

562:     if (tao->ops->setfromoptions) {
563:       (*tao->ops->setfromoptions)(PetscOptionsObject,tao);
564:     }
565:   }
566:   PetscOptionsEnd();
567:   return(0);
568: }
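
/*
   Illustrative sketch: TaoSetFromOptions() is typically called once, after the
   problem callbacks have been set and before TaoSolve(), so that options such
   as those listed above take effect, e.g.

     TaoSetFromOptions(tao);
     TaoSolve(tao);

   with a (hypothetical) command line such as

     ./myprog -tao_type nls -tao_gatol 1e-10 -tao_monitor -tao_view
*/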

570: /*@C
 571:    TaoViewFromOptions - Views a Tao object based on values in the options database

573:    Collective on Tao

575:    Input Parameters:
 576: +  A - the Tao context
577: .  obj - Optional object
578: -  name - command line option

580:    Level: intermediate
581: .seealso:  Tao, TaoView, PetscObjectViewFromOptions(), TaoCreate()
582: @*/
583: PetscErrorCode  TaoViewFromOptions(Tao A,PetscObject obj,const char name[])
584: {

589:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
590:   return(0);
591: }

593: /*@C
594:   TaoView - Prints information about the Tao

596:   Collective on Tao

 598:   Input Parameters:
599: + tao - the Tao context
600: - viewer - visualization context

602:   Options Database Key:
603: . -tao_view - Calls TaoView() at the end of TaoSolve()

605:   Notes:
606:   The available visualization contexts include
607: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
608: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
609:          output where only the first processor opens
610:          the file.  All other processors send their
611:          data to the first processor to print.

613:   Level: beginner

615: .seealso: PetscViewerASCIIOpen()
616: @*/
617: PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
618: {
619:   PetscErrorCode      ierr;
620:   PetscBool           isascii,isstring;
621:   TaoType             type;

625:   if (!viewer) {
626:     PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);
627:   }

631:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
632:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
633:   if (isascii) {
634:     PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);

636:     if (tao->ops->view) {
637:       PetscViewerASCIIPushTab(viewer);
638:       (*tao->ops->view)(tao,viewer);
639:       PetscViewerASCIIPopTab(viewer);
640:     }
641:     if (tao->linesearch) {
642:       PetscViewerASCIIPushTab(viewer);
643:       TaoLineSearchView(tao->linesearch,viewer);
644:       PetscViewerASCIIPopTab(viewer);
645:     }
646:     if (tao->ksp) {
647:       PetscViewerASCIIPushTab(viewer);
648:       KSPView(tao->ksp,viewer);
649:       PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);
650:       PetscViewerASCIIPopTab(viewer);
651:     }

653:     PetscViewerASCIIPushTab(viewer);

655:     if (tao->XL || tao->XU) {
656:       PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);
657:     }

659:     PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);
660:     PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);
661:     PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);
662:     PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);

664:     if (tao->constrained){
665:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances:");
666:       ierr=PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);
667:       ierr=PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);
668:       PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);
669:     }

671:     if (tao->trust < tao->steptol){
672:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);
673:       ierr=PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);
674:     }

676:     if (tao->fmin>-1.e25){
677:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);
678:     }
679:     PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);

681:     PetscViewerASCIIPrintf(viewer,"total number of iterations=%D,          ",tao->niter);
682:     PetscViewerASCIIPrintf(viewer,"              (max: %D)\n",tao->max_it);

684:     if (tao->nfuncs>0){
685:       PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);
686:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
687:     }
688:     if (tao->ngrads>0){
689:       PetscViewerASCIIPrintf(viewer,"total number of gradient evaluations=%D,",tao->ngrads);
690:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
691:     }
692:     if (tao->nfuncgrads>0){
693:       PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);
694:       PetscViewerASCIIPrintf(viewer,"    (max: %D)\n",tao->max_funcs);
695:     }
696:     if (tao->nhess>0){
697:       PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);
698:     }
699:     /*  if (tao->linear_its>0){
700:      PetscViewerASCIIPrintf(viewer,"  total Krylov method iterations=%D\n",tao->linear_its);
701:      }*/
702:     if (tao->nconstraints>0){
703:       PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);
704:     }
705:     if (tao->njac>0){
706:       PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);
707:     }

709:     if (tao->reason>0){
710:       PetscViewerASCIIPrintf(viewer,    "Solution converged: ");
711:       switch (tao->reason) {
712:       case TAO_CONVERGED_GATOL:
713:         PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");
714:         break;
715:       case TAO_CONVERGED_GRTOL:
716:         PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");
717:         break;
718:       case TAO_CONVERGED_GTTOL:
719:         PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");
720:         break;
721:       case TAO_CONVERGED_STEPTOL:
722:         PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");
723:         break;
724:       case TAO_CONVERGED_MINF:
725:         PetscViewerASCIIPrintf(viewer," Minf --  f < fmin\n");
726:         break;
727:       case TAO_CONVERGED_USER:
728:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
729:         break;
730:       default:
731:         PetscViewerASCIIPrintf(viewer,"\n");
732:         break;
733:       }

735:     } else {
736:       PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);
737:       switch (tao->reason) {
738:       case TAO_DIVERGED_MAXITS:
739:         PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");
740:         break;
741:       case TAO_DIVERGED_NAN:
742:         PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");
743:         break;
744:       case TAO_DIVERGED_MAXFCN:
745:         PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");
746:         break;
747:       case TAO_DIVERGED_LS_FAILURE:
748:         PetscViewerASCIIPrintf(viewer," Line Search Failure\n");
749:         break;
750:       case TAO_DIVERGED_TR_REDUCTION:
751:         PetscViewerASCIIPrintf(viewer," Trust Region too small\n");
752:         break;
753:       case TAO_DIVERGED_USER:
754:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
755:         break;
756:       default:
757:         PetscViewerASCIIPrintf(viewer,"\n");
758:         break;
759:       }
760:     }
761:     PetscViewerASCIIPopTab(viewer);
762:   } else if (isstring) {
763:     TaoGetType(tao,&type);
764:     PetscViewerStringSPrintf(viewer," %-3.3s",type);
765:   }
766:   return(0);
767: }

769: /*@
770:   TaoSetRecycleHistory - Sets the boolean flag to enable/disable re-using
771:   iterate information from the previous TaoSolve(). This feature is disabled by
772:   default.

774:   For conjugate gradient methods (BNCG), this re-uses the latest search direction
775:   from the previous TaoSolve() call when computing the first search direction in a
776:   new solution. By default, CG methods set the first search direction to the
777:   negative gradient.

779:   For quasi-Newton family of methods (BQNLS, BQNKLS, BQNKTR, BQNKTL), this re-uses
780:   the accumulated quasi-Newton Hessian approximation from the previous TaoSolve()
781:   call. By default, QN family of methods reset the initial Hessian approximation to
782:   the identity matrix.

784:   For any other algorithm, this setting has no effect.

786:   Logically collective on Tao

788:   Input Parameters:
789: + tao - the Tao context
790: - recycle - boolean flag

792:   Options Database Keys:
793: . -tao_recycle_history

795:   Level: intermediate

 797: .seealso: TaoGetRecycleHistory(), TAOBNCG, TAOBQNLS, TAOBQNKLS, TAOBQNKTR, TAOBQNKTL

799: @*/
800: PetscErrorCode TaoSetRecycleHistory(Tao tao, PetscBool recycle)
801: {
804:   tao->recycle = recycle;
805:   return(0);
806: }
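
/*
   Illustrative sketch: enabling history recycling for a sequence of related
   solves so that, for the algorithms listed above, information from one
   TaoSolve() call seeds the next.  The loop bounds and the data update are
   application-specific placeholders.

     TaoSetType(tao,TAOBNCG);
     TaoSetRecycleHistory(tao,PETSC_TRUE);
     for (i=0; i<nproblems; i++) {
       ... update problem data or starting point ...
       TaoSolve(tao);
     }
*/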

808: /*@
809:   TaoGetRecycleHistory - Retrieve the boolean flag for re-using iterate information
810:   from the previous TaoSolve(). This feature is disabled by default.

812:   Logically collective on Tao

 814:   Input Parameter:
 815: . tao - the Tao context

 817:   Output Parameter:
 818: . recycle - boolean flag

820:   Options Database Keys:
821: . -tao_recycle_history

823:   Level: intermediate

 825: .seealso: TaoSetRecycleHistory(), TAOBNCG, TAOBQNLS, TAOBQNKLS, TAOBQNKTR, TAOBQNKTL

827: @*/
828: PetscErrorCode TaoGetRecycleHistory(Tao tao, PetscBool *recycle)
829: {
832:   *recycle = tao->recycle;
833:   return(0);
834: }

836: /*@
837:   TaoSetTolerances - Sets parameters used in TAO convergence tests

839:   Logically collective on Tao

841:   Input Parameters:
842: + tao - the Tao context
843: . gatol - stop if norm of gradient is less than this
844: . grtol - stop if relative norm of gradient is less than this
845: - gttol - stop if norm of gradient is reduced by this factor

847:   Options Database Keys:
848: + -tao_gatol <gatol> - Sets gatol
849: . -tao_grtol <grtol> - Sets grtol
850: - -tao_gttol <gttol> - Sets gttol

852:   Stopping Criteria:
853: $ ||g(X)||                            <= gatol
854: $ ||g(X)|| / |f(X)|                   <= grtol
855: $ ||g(X)|| / ||g(X0)||                <= gttol

857:   Notes:
858:   Use PETSC_DEFAULT to leave one or more tolerances unchanged.

860:   Level: beginner

862: .seealso: TaoGetTolerances()

864: @*/
865: PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
866: {


872:   if (gatol != PETSC_DEFAULT) {
873:     if (gatol<0) {
874:       PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");
875:     } else {
876:       tao->gatol = PetscMax(0,gatol);
877:       tao->gatol_changed=PETSC_TRUE;
878:     }
879:   }

881:   if (grtol != PETSC_DEFAULT) {
882:     if (grtol<0) {
883:       PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");
884:     } else {
885:       tao->grtol = PetscMax(0,grtol);
886:       tao->grtol_changed=PETSC_TRUE;
887:     }
888:   }

890:   if (gttol != PETSC_DEFAULT) {
891:     if (gttol<0) {
892:       PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");
893:     } else {
894:       tao->gttol = PetscMax(0,gttol);
895:       tao->gttol_changed=PETSC_TRUE;
896:     }
897:   }
898:   return(0);
899: }
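
/*
   Illustrative sketch: tightening only the absolute gradient tolerance while
   leaving grtol and gttol unchanged, using PETSC_DEFAULT as described in the
   Notes above.

     TaoSetTolerances(tao,1e-10,PETSC_DEFAULT,PETSC_DEFAULT);
*/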

901: /*@
 902:   TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO convergence tests

904:   Logically collective on Tao

906:   Input Parameters:
907: + tao - the Tao context
 908: . catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
 909: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

911:   Options Database Keys:
912: + -tao_catol <catol> - Sets catol
913: - -tao_crtol <crtol> - Sets crtol

915:   Notes:
916:   Use PETSC_DEFAULT to leave any tolerance unchanged.

918:   Level: intermediate

920: .seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()

922: @*/
923: PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
924: {


930:   if (catol != PETSC_DEFAULT) {
931:     if (catol<0) {
932:       PetscInfo(tao,"Tried to set negative catol -- ignored.\n");
933:     } else {
934:       tao->catol = PetscMax(0,catol);
935:       tao->catol_changed=PETSC_TRUE;
936:     }
937:   }

939:   if (crtol != PETSC_DEFAULT) {
940:     if (crtol<0) {
941:       PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");
942:     } else {
943:       tao->crtol = PetscMax(0,crtol);
944:       tao->crtol_changed=PETSC_TRUE;
945:     }
946:   }
947:   return(0);
948: }

950: /*@
 951:   TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO convergence tests

 953:   Not Collective

955:   Input Parameter:
956: . tao - the Tao context

 958:   Output Parameters:
 959: + catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
 960: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

962:   Level: intermediate

964: .seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()

966: @*/
967: PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
968: {
971:   if (catol) *catol = tao->catol;
972:   if (crtol) *crtol = tao->crtol;
973:   return(0);
974: }

976: /*@
977:    TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
978:    When an approximate solution with an objective value below this number
979:    has been found, the solver will terminate.

981:    Logically Collective on Tao

983:    Input Parameters:
984: +  tao - the Tao solver context
985: -  fmin - the tolerance

987:    Options Database Keys:
988: .    -tao_fmin <fmin> - sets the minimum function value

990:    Level: intermediate

992: .seealso: TaoSetTolerances()
993: @*/
994: PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin)
995: {
998:   tao->fmin = fmin;
999:   tao->fmin_changed=PETSC_TRUE;
1000:   return(0);
1001: }

1003: /*@
1004:    TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
1005:    When an approximate solution with an objective value below this number
1006:    has been found, the solver will terminate.

1008:    Not collective on Tao

1010:    Input Parameters:
1011: .  tao - the Tao solver context

1013:    Output Parameter:
1014: .  fmin - the minimum function value

1016:    Level: intermediate

1018: .seealso: TaoSetFunctionLowerBound()
1019: @*/
1020: PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin)
1021: {
1024:   *fmin = tao->fmin;
1025:   return(0);
1026: }

1028: /*@
1029:    TaoSetMaximumFunctionEvaluations - Sets a maximum number of
1030:    function evaluations.

1032:    Logically Collective on Tao

1034:    Input Parameters:
1035: +  tao - the Tao solver context
1036: -  nfcn - the maximum number of function evaluations (>=0)

1038:    Options Database Keys:
1039: .    -tao_max_funcs <nfcn> - sets the maximum number of function evaluations

1041:    Level: intermediate

1043: .seealso: TaoSetTolerances(), TaoSetMaximumIterations()
1044: @*/

1046: PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn)
1047: {
1050:   tao->max_funcs = PetscMax(0,nfcn);
1051:   tao->max_funcs_changed=PETSC_TRUE;
1052:   return(0);
1053: }

1055: /*@
1056:    TaoGetMaximumFunctionEvaluations - Gets the maximum number of
1057:    function evaluations.

1059:    Not Collective

1061:    Input Parameters:
1062: .  tao - the Tao solver context

1064:    Output Parameters:
1065: .  nfcn - the maximum number of function evaluations

1067:    Level: intermediate

1069: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
1070: @*/

1072: PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn)
1073: {
1076:   *nfcn = tao->max_funcs;
1077:   return(0);
1078: }

1080: /*@
1081:    TaoGetCurrentFunctionEvaluations - Gets the current number of
1082:    function evaluations.

1084:    Not Collective

1086:    Input Parameters:
1087: .  tao - the Tao solver context

1089:    Output Parameters:
1090: .  nfuncs - the current number of function evaluations

1092:    Level: intermediate

1094: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
1095: @*/

1097: PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs)
1098: {
1101:   *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
1102:   return(0);
1103: }

1105: /*@
1106:    TaoSetMaximumIterations - Sets a maximum number of iterates.

1108:    Logically Collective on Tao

1110:    Input Parameters:
1111: +  tao - the Tao solver context
1112: -  maxits - the maximum number of iterates (>=0)

1114:    Options Database Keys:
1115: .    -tao_max_it <its> - sets the maximum number of iterations

1117:    Level: intermediate

1119: .seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations()
1120: @*/
1121: PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits)
1122: {
1125:   tao->max_it = PetscMax(0,maxits);
1126:   tao->max_it_changed=PETSC_TRUE;
1127:   return(0);
1128: }

1130: /*@
1131:    TaoGetMaximumIterations - Gets the maximum number of iterates.

1133:    Not Collective

1135:    Input Parameters:
1136: .  tao - the Tao solver context

1138:    Output Parameters:
1139: .  maxits - the maximum number of iterates

1141:    Level: intermediate

1143: .seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations()
1144: @*/
1145: PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits)
1146: {
1149:   *maxits = tao->max_it;
1150:   return(0);
1151: }

1153: /*@
1154:    TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.

1156:    Logically collective on Tao

1158:    Input Parameters:
1159: +  tao - a TAO optimization solver
1160: -  radius - the trust region radius

1162:    Level: intermediate

1164:    Options Database Key:
1165: .  -tao_trust0 <t0> - sets initial trust region radius

1167: .seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance()
1168: @*/
1169: PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
1170: {
1173:   tao->trust0 = PetscMax(0.0,radius);
1174:   tao->trust0_changed=PETSC_TRUE;
1175:   return(0);
1176: }

1178: /*@
1179:    TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.

1181:    Not Collective

1183:    Input Parameter:
1184: .  tao - a TAO optimization solver

1186:    Output Parameter:
1187: .  radius - the trust region radius

1189:    Level: intermediate

1191: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius()
1192: @*/
1193: PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius)
1194: {
1197:   *radius = tao->trust0;
1198:   return(0);
1199: }

1201: /*@
1202:    TaoGetCurrentTrustRegionRadius - Gets the current trust region radius.

1204:    Not Collective

1206:    Input Parameter:
1207: .  tao - a TAO optimization solver

1209:    Output Parameter:
1210: .  radius - the trust region radius

1212:    Level: intermediate

1214: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius()
1215: @*/
1216: PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius)
1217: {
1220:   *radius = tao->trust;
1221:   return(0);
1222: }

1224: /*@
1225:   TaoGetTolerances - gets the current values of tolerances

1227:   Not Collective

1229:   Input Parameters:
1230: . tao - the Tao context

1232:   Output Parameters:
1233: + gatol - stop if norm of gradient is less than this
1234: . grtol - stop if relative norm of gradient is less than this
1235: - gttol - stop if norm of gradient is reduced by this factor

1237:   Note: NULL can be used as an argument if not all tolerance values are needed

1239: .seealso: TaoSetTolerances()

1241:   Level: intermediate
1242: @*/
1243: PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol)
1244: {
1247:   if (gatol) *gatol=tao->gatol;
1248:   if (grtol) *grtol=tao->grtol;
1249:   if (gttol) *gttol=tao->gttol;
1250:   return(0);
1251: }

1253: /*@
1254:   TaoGetKSP - Gets the linear solver used by the optimization solver.
1255:   Application writers should use TaoGetKSP if they need direct access
1256:   to the PETSc KSP object.

1258:   Not Collective

1260:    Input Parameters:
1261: .  tao - the TAO solver

1263:    Output Parameters:
1264: .  ksp - the KSP linear solver used in the optimization solver

1266:    Level: intermediate

1268: @*/
1269: PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp)
1270: {
1272:   *ksp = tao->ksp;
1273:   return(0);
1274: }
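
/*
   Illustrative sketch: retrieving the inner KSP to adjust its settings
   directly.  The returned ksp can be NULL for algorithms that do not use a
   linear solver, so it is checked before use; the tolerances shown are
   arbitrary example values.

     KSP ksp;

     TaoGetKSP(tao,&ksp);
     if (ksp) {
       KSPSetTolerances(ksp,1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,50);
     }
*/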

1276: /*@
1277:    TaoGetLinearSolveIterations - Gets the total number of linear iterations
1278:    used by the TAO solver

1280:    Not Collective

1282:    Input Parameter:
1283: .  tao - TAO context

1285:    Output Parameter:
1286: .  lits - number of linear iterations

1288:    Notes:
1289:    This counter is reset to zero for each successive call to TaoSolve()

1291:    Level: intermediate

1293: .seealso:  TaoGetKSP()
1294: @*/
1295: PetscErrorCode  TaoGetLinearSolveIterations(Tao tao,PetscInt *lits)
1296: {
1300:   *lits = tao->ksp_tot_its;
1301:   return(0);
1302: }

1304: /*@
1305:   TaoGetLineSearch - Gets the line search used by the optimization solver.
1306:   Application writers should use TaoGetLineSearch if they need direct access
1307:   to the TaoLineSearch object.

1309:   Not Collective

1311:    Input Parameters:
1312: .  tao - the TAO solver

1314:    Output Parameters:
1315: .  ls - the line search used in the optimization solver

1317:    Level: intermediate

1319: @*/
1320: PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls)
1321: {
1323:   *ls = tao->linesearch;
1324:   return(0);
1325: }

1327: /*@
1328:   TaoAddLineSearchCounts - Adds the number of function evaluations spent
1329:   in the line search to the running total.

1331:    Input Parameter:
1332: .  tao - the TAO solver

1335:    Level: developer

1337: .seealso: TaoLineSearchApply()
1338: @*/
1339: PetscErrorCode TaoAddLineSearchCounts(Tao tao)
1340: {
1342:   PetscBool      flg;
1343:   PetscInt       nfeval,ngeval,nfgeval;

1347:   if (tao->linesearch) {
1348:     TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);
1349:     if (!flg) {
1350:       TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);
1351:       tao->nfuncs+=nfeval;
1352:       tao->ngrads+=ngeval;
1353:       tao->nfuncgrads+=nfgeval;
1354:     }
1355:   }
1356:   return(0);
1357: }

1359: /*@
1360:   TaoGetSolutionVector - Returns the vector with the current TAO solution

1362:   Not Collective

1364:   Input Parameter:
1365: . tao - the Tao context

1367:   Output Parameter:
1368: . X - the current solution

1370:   Level: intermediate

1372:   Note:  The returned vector will be the same object that was passed into TaoSetInitialVector()
1373: @*/
1374: PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X)
1375: {
1378:   *X = tao->solution;
1379:   return(0);
1380: }

1382: /*@
1383:   TaoGetGradientVector - Returns the vector with the current TAO gradient

1385:   Not Collective

1387:   Input Parameter:
1388: . tao - the Tao context

1390:   Output Parameter:
1391: . G - the current gradient

1393:   Level: intermediate
1394: @*/
1395: PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G)
1396: {
1399:   *G = tao->gradient;
1400:   return(0);
1401: }

1403: /*@
1404:    TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers.
1405:    These statistics include the iteration number, residual norms, and convergence status.
1406:    This routine gets called before solving each optimization problem.

1408:    Collective on Tao

1410:    Input Parameter:
1411: .  tao - the Tao context

1413:    Level: developer

1415: .seealso: TaoCreate(), TaoSolve()
1416: @*/
1417: PetscErrorCode TaoResetStatistics(Tao tao)
1418: {
1421:   tao->niter        = 0;
1422:   tao->nfuncs       = 0;
1423:   tao->nfuncgrads   = 0;
1424:   tao->ngrads       = 0;
1425:   tao->nhess        = 0;
1426:   tao->njac         = 0;
1427:   tao->nconstraints = 0;
1428:   tao->ksp_its      = 0;
1429:   tao->ksp_tot_its  = 0;
1430:   tao->reason       = TAO_CONTINUE_ITERATING;
1431:   tao->residual     = 0.0;
1432:   tao->cnorm        = 0.0;
1433:   tao->step         = 0.0;
1434:   tao->lsflag       = PETSC_FALSE;
1435:   if (tao->hist_reset) tao->hist_len=0;
1436:   return(0);
1437: }

1439: /*@C
1440:   TaoSetUpdate - Sets the general-purpose update function called
1441:   at the beginning of every iteration of the nonlinear solve. Specifically
1442:   it is called at the top of every iteration, after the new solution and the gradient
1443:   are determined, but before the Hessian is computed (if applicable).

1445:   Logically Collective on Tao

1447:   Input Parameters:
1448: + tao - The tao solver context
1449: - func - The update function

1451:   Calling sequence of func:
1452: $ func(Tao tao, PetscInt step, void *ctx);

1454: . step - The current step of the iteration (ctx is the user context passed to TaoSetUpdate())

1456:   Level: advanced

1458: .seealso: TaoSolve()
1459: @*/
1460: PetscErrorCode  TaoSetUpdate(Tao tao, PetscErrorCode (*func)(Tao, PetscInt,void*), void *ctx)
1461: {
1464:   tao->ops->update = func;
1465:   tao->user_update = ctx;
1466:   return(0);
1467: }
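
/*
   Illustrative sketch of a user update routine matching the calling sequence
   above; MyUpdate and AppCtx are application-defined placeholder names.

     PetscErrorCode MyUpdate(Tao tao, PetscInt step, void *ctx)
     {
       AppCtx *user = (AppCtx*)ctx;

       ... refresh any data that changes from iteration to iteration ...
       return(0);
     }

     TaoSetUpdate(tao,MyUpdate,&user);
*/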

1469: /*@C
1470:   TaoSetConvergenceTest - Sets the function that is to be used to test
1471:   for convergence of the iterative minimization solution.  The new convergence
1472:   testing routine will replace TAO's default convergence test.

1474:   Logically Collective on Tao

1476:   Input Parameters:
1477: + tao - the Tao object
1478: . conv - the routine to test for convergence
1479: - ctx - [optional] context for private data for the convergence routine
1480:         (may be NULL)

1482:   Calling sequence of conv:
1483: $   PetscErrorCode conv(Tao tao, void *ctx)

1485: + tao - the Tao object
1486: - ctx - [optional] convergence context

1488:   Note: The new convergence testing routine should call TaoSetConvergedReason().

1490:   Level: advanced

1492: .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor()

1494: @*/
1495: PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx)
1496: {
1499:   (tao)->ops->convergencetest = conv;
1500:   (tao)->cnvP = ctx;
1501:   return(0);
1502: }
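
/*
   Illustrative sketch of a user convergence test; as noted above it must set a
   reason via TaoSetConvergedReason().  MyConverged and the stopping check are
   application-defined placeholders.

     PetscErrorCode MyConverged(Tao tao, void *ctx)
     {
       PetscBool done = PETSC_FALSE;

       ... application-specific stopping check sets done ...
       if (done) {
         TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
       } else {
         TaoSetConvergedReason(tao,TAO_CONTINUE_ITERATING);
       }
       return(0);
     }

     TaoSetConvergenceTest(tao,MyConverged,NULL);
*/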

1504: /*@C
1505:    TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every
1506:    iteration of the solver to display the iteration's
1507:    progress.

1509:    Logically Collective on Tao

1511:    Input Parameters:
1512: +  tao - the Tao solver context
1513: .  mymonitor - monitoring routine
1514: -  mctx - [optional] user-defined context for private data for the
1515:           monitor routine (may be NULL)

1517:    Calling sequence of mymonitor:
1518: $     PetscErrorCode mymonitor(Tao tao,void *mctx)

1520: +    tao - the Tao solver context
1521: -    mctx - [optional] monitoring context


1524:    Options Database Keys:
1525: +    -tao_monitor        - sets TaoMonitorDefault()
1526: .    -tao_smonitor       - sets short monitor
1527: .    -tao_cmonitor       - same as smonitor plus constraint norm
1528: .    -tao_view_solution   - view solution at each iteration
1529: .    -tao_view_gradient   - view gradient at each iteration
1530: .    -tao_view_ls_residual - view least-squares residual vector at each iteration
1531: -    -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database.


1534:    Notes:
1535:    Several different monitoring routines may be set by calling
1536:    TaoSetMonitor() multiple times; all will be called in the
1537:    order in which they were set.

1539:    Fortran Notes:
1540:     Only one monitor function may be set

1542:    Level: intermediate

1544: .seealso: TaoMonitorDefault(), TaoCancelMonitors(),  TaoSetDestroyRoutine()
1545: @*/
1546: PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**))
1547: {
1549:   PetscInt       i;
1550:   PetscBool      identical;

1554:   if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PetscObjectComm((PetscObject)tao),1,"Cannot attach another monitor -- max=%d",MAXTAOMONITORS);

1556:   for (i=0; i<tao->numbermonitors;i++) {
1557:     PetscMonitorCompare((PetscErrorCode (*)(void))func,ctx,dest,(PetscErrorCode (*)(void))tao->monitor[i],tao->monitorcontext[i],tao->monitordestroy[i],&identical);
1558:     if (identical) return(0);
1559:   }
1560:   tao->monitor[tao->numbermonitors] = func;
1561:   tao->monitorcontext[tao->numbermonitors] = (void*)ctx;
1562:   tao->monitordestroy[tao->numbermonitors] = dest;
1563:   ++tao->numbermonitors;
1564:   return(0);
1565: }
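
/*
   Illustrative sketch: registering an application-defined monitor with no
   context and no destroy routine.  MyMonitor is a placeholder name; the solver
   state is queried with TaoGetSolutionStatus().

     PetscErrorCode MyMonitor(Tao tao, void *ctx)
     {
       PetscInt           its;
       PetscReal          f,gnorm,cnorm,xdiff;
       TaoConvergedReason reason;

       TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
       PetscPrintf(PetscObjectComm((PetscObject)tao),"iter %D: f=%g  ||g||=%g\n",its,(double)f,(double)gnorm);
       return(0);
     }

     TaoSetMonitor(tao,MyMonitor,NULL,NULL);
*/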

1567: /*@
1568:    TaoCancelMonitors - Clears all the monitor functions for a Tao object.

1570:    Logically Collective on Tao

1572:    Input Parameters:
1573: .  tao - the Tao solver context

1575:    Options Database:
1576: .  -tao_cancelmonitors - cancels all monitors that have been hardwired
1577:     into a code by calls to TaoSetMonitor(), but does not cancel those
1578:     set via the options database

1580:    Notes:
1581:    There is no way to clear one specific monitor from a Tao object.

1583:    Level: advanced

1585: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1586: @*/
1587: PetscErrorCode TaoCancelMonitors(Tao tao)
1588: {
1589:   PetscInt       i;

1594:   for (i=0;i<tao->numbermonitors;i++) {
1595:     if (tao->monitordestroy[i]) {
1596:       (*tao->monitordestroy[i])(&tao->monitorcontext[i]);
1597:     }
1598:   }
1599:   tao->numbermonitors=0;
1600:   return(0);
1601: }

1603: /*@
1604:    TaoMonitorDefault - Default routine for monitoring progress of the
1605:    Tao solvers (default).  This monitor prints the function value and gradient
1606:    norm at each iteration.  It can be turned on from the command line using the
1607:    -tao_monitor option

1609:    Collective on Tao

1611:    Input Parameters:
1612: +  tao - the Tao context
1613: -  ctx - PetscViewer context or NULL

1615:    Options Database Keys:
1616: .  -tao_monitor

1618:    Level: advanced

1620: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1621: @*/
1622: PetscErrorCode TaoMonitorDefault(Tao tao, void *ctx)
1623: {
1625:   PetscInt       its, tabs;
1626:   PetscReal      fct,gnorm;
1627:   PetscViewer    viewer = (PetscViewer)ctx;

1631:   its=tao->niter;
1632:   fct=tao->fc;
1633:   gnorm=tao->residual;
1634:   PetscViewerASCIIGetTab(viewer, &tabs);
1635:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1636:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1637:      PetscViewerASCIIPrintf(viewer,"  Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1638:      tao->header_printed = PETSC_TRUE;
1639:    }
1640:   ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1641:   ierr=PetscViewerASCIIPrintf(viewer,"  Function value: %g,",(double)fct);
1642:   if (gnorm >= PETSC_INFINITY) {
1643:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf \n");
1644:   } else {
1645:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g \n",(double)gnorm);
1646:   }
1647:   PetscViewerASCIISetTab(viewer, tabs);
1648:   return(0);
1649: }

1651: /*@
1652:    TaoDefaultGMonitor - Default routine for monitoring progress of the
1653:    Tao solvers (default) with extra detail on the globalization method.
1654:    This monitor prints the function value and gradient norm at each
1655:    iteration, as well as the step size and trust radius. Note that the
1656:    step size and trust radius may be the same for some algorithms.
1657:    It can be turned on from the command line using the
1658:    -tao_gmonitor option

1660:    Collective on Tao

1662:    Input Parameters:
1663: +  tao - the Tao context
1664: -  ctx - PetscViewer context or NULL

1666:    Options Database Keys:
1667: .  -tao_gmonitor

1669:    Level: advanced

1671: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1672: @*/
1673: PetscErrorCode TaoDefaultGMonitor(Tao tao, void *ctx)
1674: {
1676:   PetscInt       its, tabs;
1677:   PetscReal      fct,gnorm,stp,tr;
1678:   PetscViewer    viewer = (PetscViewer)ctx;

1682:   its=tao->niter;
1683:   fct=tao->fc;
1684:   gnorm=tao->residual;
1685:   stp=tao->step;
1686:   tr=tao->trust;
1687:   PetscViewerASCIIGetTab(viewer, &tabs);
1688:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1689:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1690:      PetscViewerASCIIPrintf(viewer,"  Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1691:      tao->header_printed = PETSC_TRUE;
1692:    }
1693:   ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1694:   ierr=PetscViewerASCIIPrintf(viewer,"  Function value: %g,",(double)fct);
1695:   if (gnorm >= PETSC_INFINITY) {
1696:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf,");
1697:   } else {
1698:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g,",(double)gnorm);
1699:   }
1700:   PetscViewerASCIIPrintf(viewer,"  Step: %g,  Trust: %g\n",(double)stp,(double)tr);
1701:   PetscViewerASCIISetTab(viewer, tabs);
1702:   return(0);
1703: }

1705: /*@
1706:    TaoDefaultSMonitor - Default routine for monitoring progress of the
1707:    solver. Same as TaoMonitorDefault() except
1708:    it prints fewer digits of the residual as the residual gets smaller.
1709:    This is because the later digits are meaningless and are often
1710:    different on different machines; by using this routine different
1711:    machines will usually generate the same output. It can be turned on
1712:    by using the -tao_smonitor option

1714:    Collective on Tao

1716:    Input Parameters:
1717: +  tao - the Tao context
1718: -  ctx - PetscViewer context of type ASCII

1720:    Options Database Keys:
1721: .  -tao_smonitor

1723:    Level: advanced

1725: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1726: @*/
1727: PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
1728: {
1730:   PetscInt       its, tabs;
1731:   PetscReal      fct,gnorm;
1732:   PetscViewer    viewer = (PetscViewer)ctx;

1736:   its=tao->niter;
1737:   fct=tao->fc;
1738:   gnorm=tao->residual;
1739:   PetscViewerASCIIGetTab(viewer, &tabs);
1740:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1741:   PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);
1742:   PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);
1743:   if (gnorm >= PETSC_INFINITY) {
1744:     PetscViewerASCIIPrintf(viewer," Residual: Inf \n");
1745:   } else if (gnorm > 1.e-6) {
1746:     PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);
1747:   } else if (gnorm > 1.e-11) {
1748:     PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");
1749:   } else {
1750:     PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");
1751:   }
1752:   PetscViewerASCIISetTab(viewer, tabs);
1753:   return(0);
1754: }

1756: /*@
1757:    TaoDefaultCMonitor - same as TaoMonitorDefault() except
1758:    it prints the norm of the constraints function. It can be turned on
1759:    from the command line using the -tao_cmonitor option

1761:    Collective on Tao

1763:    Input Parameters:
1764: +  tao - the Tao context
1765: -  ctx - PetscViewer context or NULL

1767:    Options Database Keys:
1768: .  -tao_cmonitor

1770:    Level: advanced

1772: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1773: @*/
1774: PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx)
1775: {
1777:   PetscInt       its, tabs;
1778:   PetscReal      fct,gnorm;
1779:   PetscViewer    viewer = (PetscViewer)ctx;

1783:   its=tao->niter;
1784:   fct=tao->fc;
1785:   gnorm=tao->residual;
1786:   PetscViewerASCIIGetTab(viewer, &tabs);
1787:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1788:   PetscViewerASCIIPrintf(viewer,"iter = %D,",its);
1789:   PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1790:   PetscViewerASCIIPrintf(viewer,"  Residual: %g ",(double)gnorm);
1791:   PetscViewerASCIIPrintf(viewer,"  Constraint: %g \n",(double)tao->cnorm);
1792:   PetscViewerASCIISetTab(viewer, tabs);
1793:   return(0);
1794: }

1796: /*@C
1797:    TaoSolutionMonitor - Views the solution at each iteration.
1798:    It can be turned on from the command line using the
1799:    -tao_view_solution option

1801:    Collective on Tao

1803:    Input Parameters:
1804: +  tao - the Tao context
1805: -  ctx - PetscViewer context or NULL

1807:    Options Database Keys:
1808: .  -tao_view_solution

1810:    Level: advanced

1812: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1813: @*/
1814: PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx)
1815: {
1817:   PetscViewer    viewer  = (PetscViewer)ctx;

1821:   VecView(tao->solution, viewer);
1822:   return(0);
1823: }

1825: /*@C
1826:    TaoGradientMonitor - Views the gradient at each iteration.
1827:    It can be turned on from the command line using the
1828:    -tao_view_gradient option

1830:    Collective on Tao

1832:    Input Parameters:
1833: +  tao - the Tao context
1834: -  ctx - PetscViewer context or NULL

1836:    Options Database Keys:
1837: .  -tao_view_gradient

1839:    Level: advanced

1841: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1842: @*/
1843: PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx)
1844: {
1846:   PetscViewer    viewer = (PetscViewer)ctx;

1850:   VecView(tao->gradient, viewer);
1851:   return(0);
1852: }

1854: /*@C
1855:    TaoStepDirectionMonitor - Views the step direction at each iteration.
1856:    It can be turned on from the command line using the
1857:    -tao_view_stepdirection option

1859:    Collective on Tao

1861:    Input Parameters:
1862: +  tao - the Tao context
1863: -  ctx - PetscViewer context or NULL

1865:    Options Database Keys:
1866: .  -tao_view_stepdirection

1868:    Level: advanced

1870: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1871: @*/
1872: PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx)
1873: {
1875:   PetscViewer    viewer = (PetscViewer)ctx;

1879:   VecView(tao->stepdirection, viewer);
1880:   return(0);
1881: }

1883: /*@C
1884:    TaoDrawSolutionMonitor - Plots the solution at each iteration.
1885:    It can be turned on from the command line using the
1886:    -tao_draw_solution option

1888:    Collective on Tao

1890:    Input Parameters:
1891: +  tao - the Tao context
1892: -  ctx - TaoMonitorDrawCtx context, created with TaoMonitorDrawCtxCreate()

1894:    Options Database Keys:
1895: .  -tao_draw_solution

1897:    Level: advanced

1899: .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor
1900: @*/
1901: PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx)
1902: {
1903:   PetscErrorCode    ierr;
1904:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1907:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1908:   VecView(tao->solution,ictx->viewer);
1909:   return(0);
1910: }

1912: /*@C
1913:    TaoDrawGradientMonitor - Plots the gradient at each iteration.
1914:    It can be turned on from the command line using the
1915:    -tao_draw_gradient option

1917:    Collective on Tao

1919:    Input Parameters:
1920: +  tao - the Tao context
1921: -  ctx - TaoMonitorDrawCtx context, created with TaoMonitorDrawCtxCreate()

1923:    Options Database Keys:
1924: .  -tao_draw_gradient

1926:    Level: advanced

1928: .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor
1929: @*/
1930: PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx)
1931: {
1932:   PetscErrorCode    ierr;
1933:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1936:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1937:   VecView(tao->gradient,ictx->viewer);
1938:   return(0);
1939: }

1941: /*@C
1942:    TaoDrawStepMonitor - Plots the step direction at each iteration.
1943:    It can be turned on from the command line using the
1944:    -tao_draw_step option

1946:    Collective on Tao

1948:    Input Parameters:
1949: +  tao - the Tao context
1950: -  ctx - PetscViewer context

1952:    Options Database Keys:
1953: .  -tao_draw_step

1955:    Level: advanced

1957: .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor
1958: @*/
1959: PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx)
1960: {
1962:   PetscViewer    viewer = (PetscViewer)(ctx);

1965:   VecView(tao->stepdirection, viewer);
1966:   return(0);
1967: }

1969: /*@C
1970:    TaoResidualMonitor - Views the least-squares residual at each iteration.
1971:    It can be turned on from the command line using the
1972:    -tao_view_ls_residual option

1974:    Collective on Tao

1976:    Input Parameters:
1977: +  tao - the Tao context
1978: -  ctx - PetscViewer context or NULL

1980:    Options Database Keys:
1981: .  -tao_view_ls_residual

1983:    Level: advanced

1985: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1986: @*/
1987: PetscErrorCode TaoResidualMonitor(Tao tao, void *ctx)
1988: {
1990:   PetscViewer    viewer  = (PetscViewer)ctx;

1994:   VecView(tao->ls_res,viewer);
1995:   return(0);
1996: }

1998: /*@
1999:    TaoDefaultConvergenceTest - Determines whether the solver should continue iterating
2000:    or terminate.

2002:    Collective on Tao

2004:    Input Parameters:
2005: +  tao - the Tao context
2006: -  dummy - unused dummy context

2008:    Output Parameter:
2009: .  reason - the reason for terminating, stored on the Tao object (retrieve it with TaoGetConvergedReason())

2011:    Notes:
2012:    This routine checks the residual in the optimality conditions, the
2013:    relative residual in the optimality conditions, the number of function
2014:    evaluations, and the function value to test convergence.  Some
2015:    solvers may use different convergence routines.

2017:    Level: developer

2019: .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason()
2020: @*/

2022: PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy)
2023: {
2024:   PetscInt           niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
2025:   PetscInt           max_funcs=tao->max_funcs;
2026:   PetscReal          gnorm=tao->residual, gnorm0=tao->gnorm0;
2027:   PetscReal          f=tao->fc, steptol=tao->steptol,trradius=tao->step;
2028:   PetscReal          gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol;
2029:   PetscReal          catol=tao->catol,crtol=tao->crtol;
2030:   PetscReal          fmin=tao->fmin, cnorm=tao->cnorm;
2031:   TaoConvergedReason reason=tao->reason;
2032:   PetscErrorCode     ierr;

2036:   if (reason != TAO_CONTINUE_ITERATING) {
2037:     return(0);
2038:   }

2040:   if (PetscIsInfOrNanReal(f)) {
2041:     PetscInfo(tao,"Failed to converge, function value is Inf or NaN\n");
2042:     reason = TAO_DIVERGED_NAN;
2043:   } else if (f <= fmin && cnorm <=catol) {
2044:     PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);
2045:     reason = TAO_CONVERGED_MINF;
2046:   } else if (gnorm<= gatol && cnorm <=catol) {
2047:     PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);
2048:     reason = TAO_CONVERGED_GATOL;
2049:   } else if (f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) {
2050:     PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);
2051:     reason = TAO_CONVERGED_GRTOL;
2052:   } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) {
2053:     PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);
2054:     reason = TAO_CONVERGED_GTTOL;
2055:   } else if (nfuncs > max_funcs){
2056:     PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);
2057:     reason = TAO_DIVERGED_MAXFCN;
2058:   } else if (tao->lsflag != 0){
2059:     PetscInfo(tao,"Tao Line Search failure.\n");
2060:     reason = TAO_DIVERGED_LS_FAILURE;
2061:   } else if (trradius < steptol && niter > 0){
2062:     PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);
2063:     reason = TAO_CONVERGED_STEPTOL;
2064:   } else if (niter >= tao->max_it) {
2065:     PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);
2066:     reason = TAO_DIVERGED_MAXITS;
2067:   } else {
2068:     reason = TAO_CONTINUE_ITERATING;
2069:   }
2070:   tao->reason = reason;
2071:   return(0);
2072: }
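
/*
   Usage sketch (not part of the library source): a user-defined test can replace
   TaoDefaultConvergenceTest() through TaoSetConvergenceTest().  "MyConvergenceTest"
   below is hypothetical; it declares success once the residual norm drops below a
   fixed value and otherwise leaves the current reason untouched.

     PetscErrorCode MyConvergenceTest(Tao tao, void *ctx)
     {
       PetscReal gnorm;

       TaoGetResidualNorm(tao,&gnorm);
       if (gnorm < 1.e-5) TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
       return(0);
     }

     TaoSetConvergenceTest(tao,MyConvergenceTest,NULL);
*/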

2074: /*@C
2075:    TaoSetOptionsPrefix - Sets the prefix used for searching for all
2076:    TAO options in the database.


2079:    Logically Collective on Tao

2081:    Input Parameters:
2082: +  tao - the Tao context
2083: -  prefix - the prefix string to prepend to all TAO option requests

2085:    Notes:
2086:    A hyphen (-) must NOT be given at the beginning of the prefix name.
2087:    The first character of all runtime options is AUTOMATICALLY the hyphen.

2089:    For example, to distinguish between the runtime options for two
2090:    different TAO solvers, one could call
2091: .vb
2092:       TaoSetOptionsPrefix(tao1,"sys1_")
2093:       TaoSetOptionsPrefix(tao2,"sys2_")
2094: .ve

2096:    This would enable use of different options for each system, such as
2097: .vb
2098:       -sys1_tao_type blmvm -sys1_tao_gatol 1.e-3
2099:       -sys2_tao_type lmvm  -sys2_tao_gatol 1.e-4
2100: .ve


2103:    Level: advanced

2105: .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix()
2106: @*/

2108: PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[])
2109: {

2113:   PetscObjectSetOptionsPrefix((PetscObject)tao,p);
2114:   if (tao->linesearch) {
2115:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
2116:   }
2117:   if (tao->ksp) {
2118:     KSPSetOptionsPrefix(tao->ksp,p);
2119:   }
2120:   return(0);
2121: }
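
/*
   Usage sketch (not part of the library source): the prefix is normally set right
   after TaoCreate() and before TaoSetFromOptions(), so that prefixed options such as
   -sys1_tao_type and -sys1_tao_gatol are picked up.  Error checking is omitted.

     Tao tao1;
     TaoCreate(PETSC_COMM_WORLD,&tao1);
     TaoSetOptionsPrefix(tao1,"sys1_");
     TaoSetFromOptions(tao1);
*/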

2123: /*@C
2124:    TaoAppendOptionsPrefix - Appends to the prefix used for searching for all
2125:    TAO options in the database.


2128:    Logically Collective on Tao

2130:    Input Parameters:
2131: +  tao - the Tao solver context
2132: -  prefix - the prefix string to prepend to all TAO option requests

2134:    Notes:
2135:    A hyphen (-) must NOT be given at the beginning of the prefix name.
2136:    The first character of all runtime options is AUTOMATICALLY the hyphen.


2139:    Level: advanced

2141: .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix()
2142: @*/
2143: PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[])
2144: {

2148:   PetscObjectAppendOptionsPrefix((PetscObject)tao,p);
2149:   if (tao->linesearch) {
2150:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
2151:   }
2152:   if (tao->ksp) {
2153:     KSPSetOptionsPrefix(tao->ksp,p);
2154:   }
2155:   return(0);
2156: }

2158: /*@C
2159:   TaoGetOptionsPrefix - Gets the prefix used for searching for all
2160:   TAO options in the database

2162:   Not Collective

2164:   Input Parameter:
2165: . tao - the Tao context

2167:   Output Parameter:
2168: . prefix - pointer to the prefix string used

2170:   Notes:
2171:     On the Fortran side, the user should pass in a string 'prefix' of
2172:   sufficient length to hold the prefix.

2174:   Level: advanced

2176: .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix()
2177: @*/
2178: PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[])
2179: {
2180:    return PetscObjectGetOptionsPrefix((PetscObject)tao,p);
2181: }

2183: /*@C
2184:    TaoSetType - Sets the method for the unconstrained minimization solver.

2186:    Collective on Tao

2188:    Input Parameters:
2189: +  solver - the Tao solver context
2190: -  type - a known method

2192:    Options Database Key:
2193: .  -tao_type <type> - Sets the method; use -help for a list
2194:    of available methods (for instance, "-tao_type lmvm" or "-tao_type tron")

2196:    Available methods include:
2197: +    nls - Newton's method with line search for unconstrained minimization
2198: .    ntr - Newton's method with trust region for unconstrained minimization
2199: .    ntl - Newton's method with trust region, line search for unconstrained minimization
2200: .    lmvm - Limited memory variable metric method for unconstrained minimization
2201: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
2202: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
2203: .    tron - Newton Trust Region method for bound constrained minimization
2204: .    gpcg - Newton Trust Region method for quadratic bound constrained minimization
2205: .    blmvm - Limited memory variable metric method for bound constrained minimization
2206: -    pounders - Model-based derivative-free algorithm for nonlinear least squares

2208:   Level: intermediate

2210: .seealso: TaoCreate(), TaoGetType(), TaoType

2212: @*/
2213: PetscErrorCode TaoSetType(Tao tao, TaoType type)
2214: {
2216:   PetscErrorCode (*create_xxx)(Tao);
2217:   PetscBool      issame;


2222:   PetscObjectTypeCompare((PetscObject)tao,type,&issame);
2223:   if (issame) return(0);

2225:   PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);
2226:   if (!create_xxx) SETERRQ1(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type);

2228:   /* Destroy the existing solver information */
2229:   if (tao->ops->destroy) {
2230:     (*tao->ops->destroy)(tao);
2231:   }
2232:   KSPDestroy(&tao->ksp);
2233:   TaoLineSearchDestroy(&tao->linesearch);
2234:   VecDestroy(&tao->gradient);
2235:   VecDestroy(&tao->stepdirection);

2237:   tao->ops->setup = NULL;
2238:   tao->ops->solve = NULL;
2239:   tao->ops->view  = NULL;
2240:   tao->ops->setfromoptions = NULL;
2241:   tao->ops->destroy = NULL;

2243:   tao->setupcalled = PETSC_FALSE;

2245:   (*create_xxx)(tao);
2246:   PetscObjectChangeTypeName((PetscObject)tao,type);
2247:   return(0);
2248: }
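
/*
   Usage sketch (not part of the library source): set a default algorithm in the code
   and let the command line override it.  Error checking is omitted.

     TaoCreate(PETSC_COMM_WORLD,&tao);
     TaoSetType(tao,TAOLMVM);
     TaoSetFromOptions(tao);

   With this ordering a runtime option such as -tao_type ntr overrides the TAOLMVM default.
*/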

2250: /*MC
2251:    TaoRegister - Adds a method to the TAO solver package.

2253:    Synopsis:
2254:    TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))

2256:    Not collective

2258:    Input Parameters:
2259: +  sname - name of a new user-defined solver
2260: -  func - routine to Create method context

2262:    Notes:
2263:    TaoRegister() may be called multiple times to add several user-defined solvers.

2265:    Sample usage:
2266: .vb
2267:    TaoRegister("my_solver",MySolverCreate);
2268: .ve

2270:    Then, your solver can be chosen with the procedural interface via
2271: $     TaoSetType(tao,"my_solver")
2272:    or at runtime via the option
2273: $     -tao_type my_solver

2275:    Level: advanced

2277: .seealso: TaoRegisterAll(), TaoRegisterDestroy()
2278: M*/
2279: PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2280: {

2284:   TaoInitializePackage();
2285:   PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);
2286:   return(0);
2287: }
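
/*
   Usage sketch (not part of the library source): register a hypothetical solver
   "my_solver".  The creation routine fills in the ops table, which requires including
   the private header petsc/private/taoimpl.h; the TaoSolve_MySolver etc. routines are
   assumed to be provided by the user.

     static PetscErrorCode TaoCreate_MySolver(Tao tao)
     {
       tao->ops->setup          = TaoSetUp_MySolver;
       tao->ops->solve          = TaoSolve_MySolver;
       tao->ops->destroy        = TaoDestroy_MySolver;
       tao->ops->setfromoptions = TaoSetFromOptions_MySolver;
       return(0);
     }

     TaoRegister("my_solver",TaoCreate_MySolver);
     TaoSetType(tao,"my_solver");
*/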

2289: /*@C
2290:    TaoRegisterDestroy - Frees the list of minimization solvers that were
2291:    registered by TaoRegister().

2293:    Not Collective

2295:    Level: advanced

2297: .seealso: TaoRegisterAll(), TaoRegister()
2298: @*/
2299: PetscErrorCode TaoRegisterDestroy(void)
2300: {
2303:   PetscFunctionListDestroy(&TaoList);
2304:   TaoRegisterAllCalled = PETSC_FALSE;
2305:   return(0);
2306: }

2308: /*@
2309:    TaoGetIterationNumber - Gets the number of Tao iterations completed
2310:    at this time.

2312:    Not Collective

2314:    Input Parameter:
2315: .  tao - Tao context

2317:    Output Parameter:
2318: .  iter - iteration number

2320:    Notes:
2321:    For example, during the computation of iteration 2 this would return 1.


2324:    Level: intermediate

2326: .seealso:   TaoGetLinearSolveIterations(), TaoGetResidualNorm(), TaoGetObjective()
2327: @*/
2328: PetscErrorCode  TaoGetIterationNumber(Tao tao,PetscInt *iter)
2329: {
2333:   *iter = tao->niter;
2334:   return(0);
2335: }

2337: /*@
2338:    TaoGetObjective - Gets the current value of the objective function
2339:    at this time.

2341:    Not Collective

2343:    Input Parameter:
2344: .  tao - Tao context

2346:    Output Parameter:
2347: .  value - the current value

2349:    Level: intermediate

2351: .seealso:   TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetResidualNorm()
2352: @*/
2353: PetscErrorCode  TaoGetObjective(Tao tao,PetscReal *value)
2354: {
2358:   *value = tao->fc;
2359:   return(0);
2360: }

2362: /*@
2363:    TaoGetResidualNorm - Gets the current value of the norm of the residual
2364:    at this time.

2366:    Not Collective

2368:    Input Parameter:
2369: .  tao - Tao context

2371:    Output Parameter:
2372: .  value - the current value

2374:    Level: intermediate

2376:    Developer Note: This is the 2-norm of the residual; we cannot use TaoGetGradientNorm() because that has
2377:                    a different meaning. Tao sometimes refers to the gradient as the residual.

2379: .seealso:   TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetObjective()
2380: @*/
2381: PetscErrorCode  TaoGetResidualNorm(Tao tao,PetscReal *value)
2382: {
2386:   *value = tao->residual;
2387:   return(0);
2388: }

2390: /*@
2391:    TaoSetIterationNumber - Sets the current iteration number.

2393:    Not Collective

2395:    Input Parameters:
2396: +  tao - Tao context
2397: -  iter - iteration number

2399:    Level: developer

2401: .seealso:   TaoGetLinearSolveIterations()
2402: @*/
2403: PetscErrorCode  TaoSetIterationNumber(Tao tao,PetscInt iter)
2404: {

2409:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2410:   tao->niter = iter;
2411:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2412:   return(0);
2413: }

2415: /*@
2416:    TaoGetTotalIterationNumber - Gets the total number of Tao iterations
2417:    completed. This number keeps accumulating if multiple solves
2418:    are called with the Tao object.

2420:    Not Collective

2422:    Input Parameter:
2423: .  tao - Tao context

2425:    Output Parameter:
2426: .  iter - iteration number

2428:    Notes:
2429:    The total iteration count is updated after each solve; if there is a
2430:    TaoSolve() currently in progress, those iterations are not yet counted.

2432:    Level: intermediate

2434: .seealso:   TaoGetLinearSolveIterations()
2435: @*/
2436: PetscErrorCode  TaoGetTotalIterationNumber(Tao tao,PetscInt *iter)
2437: {
2441:   *iter = tao->ntotalits;
2442:   return(0);
2443: }

2445: /*@
2446:    TaoSetTotalIterationNumber - Sets the current total iteration number.

2448:    Not Collective

2450:    Input Parameters:
2451: +  tao - Tao context
2452: -  iter - iteration number

2454:    Level: developer

2456: .seealso:   TaoGetLinearSolveIterations()
2457: @*/
2458: PetscErrorCode  TaoSetTotalIterationNumber(Tao tao,PetscInt iter)
2459: {

2464:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2465:   tao->ntotalits = iter;
2466:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2467:   return(0);
2468: }

2470: /*@
2471:   TaoSetConvergedReason - Sets the termination flag on a Tao object

2473:   Logically Collective on Tao

2475:   Input Parameters:
2476: + tao - the Tao context
2477: - reason - one of
2478: $     TAO_CONVERGED_GATOL (3),
2479: $     TAO_CONVERGED_GRTOL (4),
2480: $     TAO_CONVERGED_GTTOL (5),
2481: $     TAO_CONVERGED_STEPTOL (6),
2482: $     TAO_CONVERGED_MINF (7), TAO_CONVERGED_USER (8),
2483: $     TAO_DIVERGED_MAXITS (-2),
2484: $     TAO_DIVERGED_NAN (-4),
2485: $     TAO_DIVERGED_MAXFCN (-5),
2486: $     TAO_DIVERGED_LS_FAILURE (-6),
2487: $     TAO_DIVERGED_TR_REDUCTION (-7),
2488: $     TAO_DIVERGED_USER (-8),
2489: $     TAO_CONTINUE_ITERATING (0)

2491:    Level: intermediate

2493: @*/
2494: PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason)
2495: {
2498:   tao->reason = reason;
2499:   return(0);
2500: }

2502: /*@
2503:    TaoGetConvergedReason - Gets the reason the Tao iteration was stopped.

2505:    Not Collective

2507:    Input Parameter:
2508: .  tao - the Tao solver context

2510:    Output Parameter:
2511: .  reason - one of
2512: $  TAO_CONVERGED_GATOL (3)           ||g(X)|| < gatol
2513: $  TAO_CONVERGED_GRTOL (4)           ||g(X)|| / f(X)  < grtol
2514: $  TAO_CONVERGED_GTTOL (5)           ||g(X)|| / ||g(X0)|| < gttol
2515: $  TAO_CONVERGED_STEPTOL (6)         step size small
2516: $  TAO_CONVERGED_MINF (7)            F < F_min
2517: $  TAO_CONVERGED_USER (8)            User defined
2518: $  TAO_DIVERGED_MAXITS (-2)          its > maxits
2519: $  TAO_DIVERGED_NAN (-4)             Numerical problems
2520: $  TAO_DIVERGED_MAXFCN (-5)          fevals > max_funcs
2521: $  TAO_DIVERGED_LS_FAILURE (-6)      line search failure
2522: $  TAO_DIVERGED_TR_REDUCTION (-7)    trust region failure
2523: $  TAO_DIVERGED_USER (-8)            user defined
2524: $  TAO_CONTINUE_ITERATING (0)

2526:    where
2527: +  X - current solution
2528: .  X0 - initial guess
2529: .  f(X) - current function value
2530: .  f(X*) - true solution (estimated)
2531: .  g(X) - current gradient
2532: .  its - current iterate number
2533: .  maxits - maximum number of iterates
2534: .  fevals - number of function evaluations
2535: -  max_funcs - maximum number of function evaluations

2537:    Level: intermediate

2539: .seealso: TaoSetConvergenceTest(), TaoSetTolerances()

2541: @*/
2542: PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason)
2543: {
2547:   *reason = tao->reason;
2548:   return(0);
2549: }
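
/*
   Usage sketch (not part of the library source): query the reason after TaoSolve()
   and report divergence; negative values indicate divergence, positive values
   convergence.  Error checking is omitted.

     TaoConvergedReason reason;
     TaoSolve(tao);
     TaoGetConvergedReason(tao,&reason);
     if (reason < 0) PetscPrintf(PETSC_COMM_WORLD,"Tao diverged, reason %d\n",(int)reason);
*/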

2551: /*@
2552:   TaoGetSolutionStatus - Get the current iterate, objective value,
2553:   residual, infeasibility, and termination

2555:   Not Collective

2557:    Input Parameter:
2558: .  tao - the Tao context

2560:    Output Parameters:
2561: +  its - the current iterate number (>=0)
2562: .  f - the current function value
2563: .  gnorm - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.
2564: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2565: .  xdiff - the step length or trust region radius of the most recent iterate.
2566: -  reason - The termination reason, which can equal TAO_CONTINUE_ITERATING

2568:    Level: intermediate

2570:    Note:
2571:    TAO returns the values set by the solvers in the routine TaoMonitor().

2573:    Note:
2574:    If any of the output arguments are set to NULL, no corresponding value will be returned.

2576: .seealso: TaoMonitor(), TaoGetConvergedReason()
2577: @*/
2578: PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason)
2579: {
2581:   if (its) *its=tao->niter;
2582:   if (f) *f=tao->fc;
2583:   if (gnorm) *gnorm=tao->residual;
2584:   if (cnorm) *cnorm=tao->cnorm;
2585:   if (reason) *reason=tao->reason;
2586:   if (xdiff) *xdiff=tao->step;
2587:   return(0);
2588: }
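
/*
   Usage sketch (not part of the library source): a custom monitor, here the
   hypothetical "MyMonitor" attached with TaoSetMonitor(tao,MyMonitor,NULL,NULL),
   can collect all of the current statistics in one call.

     PetscErrorCode MyMonitor(Tao tao, void *ctx)
     {
       PetscInt           its;
       PetscReal          f,gnorm,cnorm,xdiff;
       TaoConvergedReason reason;

       TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
       PetscPrintf(PetscObjectComm((PetscObject)tao),"%D: f=%g  ||g||=%g\n",its,(double)f,(double)gnorm);
       return(0);
     }
*/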

2590: /*@C
2591:    TaoGetType - Gets the current Tao algorithm.

2593:    Not Collective

2595:    Input Parameter:
2596: .  tao - the Tao solver context

2598:    Output Parameter:
2599: .  type - Tao method

2601:    Level: intermediate

2603: @*/
2604: PetscErrorCode TaoGetType(Tao tao,TaoType *type)
2605: {
2609:   *type=((PetscObject)tao)->type_name;
2610:   return(0);
2611: }

2613: /*@C
2614:   TaoMonitor - Monitor the solver and the current solution.  This
2615:   routine will record the iteration number and residual statistics,
2616:   and call any monitors specified by the user.

2618:    Input Parameters:
2619: +  tao - the Tao context
2620: .  its - the current iterate number (>=0)
2621: .  f - the current objective function value
2622: .  res - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.  This measure will be recorded and
2623:           used for some termination tests.
2624: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2625: -  steplength - multiple of the step direction added to the previous iterate.

2627:    Note:
2628:    This routine does not set the termination reason; call TaoGetConvergedReason() after TaoSolve() to obtain it.

2630:    Options Database Key:
2631: .  -tao_monitor - Use the default monitor, which prints statistics to standard output

2633: .seealso: TaoGetConvergedReason(), TaoMonitorDefault(), TaoSetMonitor()

2635:    Level: developer

2637: @*/
2638: PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength)
2639: {
2641:   PetscInt       i;

2645:   tao->fc = f;
2646:   tao->residual = res;
2647:   tao->cnorm = cnorm;
2648:   tao->step = steplength;
2649:   if (!its) {
2650:     tao->cnorm0 = cnorm; tao->gnorm0 = res;
2651:   }
2652:   if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_USER, "User provided compute function generated Inf or NaN");
2653:   for (i=0;i<tao->numbermonitors;i++) {
2654:     (*tao->monitor[i])(tao,tao->monitorcontext[i]);
2655:   }
2656:   return(0);
2657: }

2659: /*@
2660:    TaoSetConvergenceHistory - Sets the array used to hold the convergence history.

2662:    Logically Collective on Tao

2664:    Input Parameters:
2665: +  tao - the Tao solver context
2666: .  obj   - array to hold objective value history
2667: .  resid - array to hold residual history
2668: .  cnorm - array to hold constraint violation history
2669: .  lits - integer array holds the number of linear iterations for each Tao iteration
2670: .  na  - size of obj, resid, and cnorm
2671: -  reset - PETSC_TRUE indicates each new minimization resets the history counter to zero,
2672:            else it continues storing new values for new minimizations after the old ones

2674:    Notes:
2675:    If set, TAO will fill the given arrays with the indicated
2676:    information at each iteration.  If 'obj','resid','cnorm','lits' are
2677:    *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or
2678:    PETSC_DEFAULT) is allocated for the history.
2679:    If not all are NULL, then only the non-NULL information categories
2680:    will be stored; the others will be ignored.

2682:    Any convergence information after iteration number 'na' will not be stored.

2684:    This routine is useful, e.g., when running a code for purposes
2685:    of accurate performance monitoring, when no I/O should be done
2686:    during the section of code that is being timed.

2688:    Level: intermediate

2690: .seealso: TaoGetConvergenceHistory()

2692: @*/
2693: PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal obj[], PetscReal resid[], PetscReal cnorm[], PetscInt lits[], PetscInt na,PetscBool reset)
2694: {


2704:   if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000;
2705:   if (!obj && !resid && !cnorm && !lits) {
2706:     PetscCalloc4(na,&obj,na,&resid,na,&cnorm,na,&lits);
2707:     tao->hist_malloc = PETSC_TRUE;
2708:   }

2710:   tao->hist_obj = obj;
2711:   tao->hist_resid = resid;
2712:   tao->hist_cnorm = cnorm;
2713:   tao->hist_lits = lits;
2714:   tao->hist_max   = na;
2715:   tao->hist_reset = reset;
2716:   tao->hist_len = 0;
2717:   return(0);
2718: }
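
/*
   Usage sketch (not part of the library source): let TAO allocate the history by
   passing NULL arrays, solve, and then retrieve what was recorded.  Error checking
   is omitted.

     PetscReal *hist_obj,*hist_resid;
     PetscInt  i,nhist;

     TaoSetConvergenceHistory(tao,NULL,NULL,NULL,NULL,PETSC_DECIDE,PETSC_TRUE);
     TaoSolve(tao);
     TaoGetConvergenceHistory(tao,&hist_obj,&hist_resid,NULL,NULL,&nhist);
     for (i=0; i<nhist; i++) {
       PetscPrintf(PETSC_COMM_WORLD,"%D: f=%g  ||g||=%g\n",i,(double)hist_obj[i],(double)hist_resid[i]);
     }
*/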

2720: /*@C
2721:    TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history.

2723:    Collective on Tao

2725:    Input Parameter:
2726: .  tao - the Tao context

2728:    Output Parameters:
2729: +  obj   - array used to hold objective value history
2730: .  resid - array used to hold residual history
2731: .  cnorm - array used to hold constraint violation history
2732: .  lits  - integer array used to hold linear solver iteration count
2733: -  nhist  - size of obj, resid, cnorm, and lits

2735:    Notes:
2736:     This routine must be preceded by calls to TaoSetConvergenceHistory()
2737:     and TaoSolve(), otherwise it returns useless information.

2739:     The calling sequence for this routine in Fortran is
2740: $   call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr)

2742:    This routine is useful, e.g., when running a code for purposes
2743:    of accurate performance monitoring, when no I/O should be done
2744:    during the section of code that is being timed.

2746:    Level: advanced

2748: .seealso: TaoSetConvergenceHistory()

2750: @*/
2751: PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist)
2752: {
2755:   if (obj)   *obj   = tao->hist_obj;
2756:   if (cnorm) *cnorm = tao->hist_cnorm;
2757:   if (resid) *resid = tao->hist_resid;
      if (lits)  *lits  = tao->hist_lits;
2758:   if (nhist) *nhist = tao->hist_len;
2759:   return(0);
2760: }

2762: /*@
2763:    TaoSetApplicationContext - Sets the optional user-defined context for
2764:    a solver.

2766:    Logically Collective on Tao

2768:    Input Parameters:
2769: +  tao  - the Tao context
2770: -  usrP - optional user context

2772:    Level: intermediate

2774: .seealso: TaoGetApplicationContext()
2775: @*/
2776: PetscErrorCode  TaoSetApplicationContext(Tao tao,void *usrP)
2777: {
2780:   tao->user = usrP;
2781:   return(0);
2782: }

2784: /*@
2785:    TaoGetApplicationContext - Gets the user-defined context for a
2786:    TAO solver.

2788:    Not Collective

2790:    Input Parameter:
2791: .  tao  - Tao context

2793:    Output Parameter:
2794: .  usrP - user context

2796:    Level: intermediate

2798: .seealso: TaoSetApplicationContext()
2799: @*/
2800: PetscErrorCode  TaoGetApplicationContext(Tao tao,void *usrP)
2801: {
2804:   *(void**)usrP = tao->user;
2805:   return(0);
2806: }
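
/*
   Usage sketch (not part of the library source): stash a user-defined struct on the
   Tao object and recover it later, for example inside a callback.  "AppCtx" is a
   hypothetical user type.

     typedef struct { PetscReal alpha; } AppCtx;
     AppCtx user, *userptr;

     TaoSetApplicationContext(tao,&user);
     TaoGetApplicationContext(tao,&userptr);
   After the second call userptr points to user.
*/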

2808: /*@
2809:    TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient.

2811:    Collective on tao

2813:    Input Parameters:
2814: +  tao  - the Tao context
2815: -  M    - matrix defining the inner product used to measure the gradient norm

2817:    Level: beginner

2819: .seealso: TaoGetGradientNorm(), TaoGradientNorm()
2820: @*/
2821: PetscErrorCode  TaoSetGradientNorm(Tao tao, Mat M)
2822: {

2827:   PetscObjectReference((PetscObject)M);
2828:   MatDestroy(&tao->gradient_norm);
2829:   VecDestroy(&tao->gradient_norm_tmp);
2830:   tao->gradient_norm = M;
2831:   MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);
2832:   return(0);
2833: }

2835: /*@
2836:    TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient.

2838:    Not Collective

2840:    Input Parameter:
2841: .  tao  - Tao context

2843:    Output Parameter:
2844: .  M - the matrix defining the inner product, as set with TaoSetGradientNorm()

2846:    Level: beginner

2848: .seealso: TaoSetGradientNorm(), TaoGradientNorm()
2849: @*/
2850: PetscErrorCode  TaoGetGradientNorm(Tao tao, Mat *M)
2851: {
2854:   *M = tao->gradient_norm;
2855:   return(0);
2856: }

2858: /*@C
2859:    TaoGradientNorm - Compute the norm with respect to the inner product the user has set.

2861:    Collective on tao

2863:    Input Parameters:
2864: +  tao      - the Tao context
2865: .  gradient - the gradient whose norm is to be computed
2866: -  type     - the norm type

2868:    Output Parameter:
2869: .  gnorm    - the gradient norm

2871:    Level: developer

2873: .seealso: TaoSetGradientNorm(), TaoGetGradientNorm()
2874: @*/
2875: PetscErrorCode  TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
2876: {

2884:   if (tao->gradient_norm) {
2885:     PetscScalar gnorms;

2887:     if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONG, "Norm type must be NORM_2 if an inner product for the gradient norm is set.");
2888:     MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);
2889:     VecDot(gradient, tao->gradient_norm_tmp, &gnorms);
2890:     *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
2891:   } else {
2892:     VecNorm(gradient, type, gnorm);
2893:   }
2894:   return(0);
2895: }
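
/*
   Usage sketch (not part of the library source): measure the gradient in a weighted
   inner product, ||g||_M = sqrt(g^T M g), by supplying a matrix M (for instance a
   mass matrix from the discretization).  Only NORM_2 is supported in that case.

     TaoSetGradientNorm(tao,M);
     TaoGradientNorm(tao,g,NORM_2,&gnorm);
   Here M and g are a user-provided Mat and Vec, and gnorm receives sqrt(g^T M g).
*/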

2897: /*@C
2898:    TaoMonitorDrawCtxCreate - Creates the monitor context for TaoMonitorDrawCtx

2900:    Collective on Tao

2902:    Output Parameter:
2903: .    ctx - the monitor context

2905:    Options Database:
2906: .   -tao_draw_solution_initial - show initial guess as well as current solution

2908:    Level: intermediate

2910: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoMonitorDrawCtxDestroy()
2911: @*/
2912: PetscErrorCode  TaoMonitorDrawCtxCreate(MPI_Comm comm,const char host[],const char label[],int x,int y,int m,int n,PetscInt howoften,TaoMonitorDrawCtx *ctx)
2913: {
2914:   PetscErrorCode   ierr;

2917:   PetscNew(ctx);
2918:   PetscViewerDrawOpen(comm,host,label,x,y,m,n,&(*ctx)->viewer);
2919:   PetscViewerSetFromOptions((*ctx)->viewer);
2920:   (*ctx)->howoften = howoften;
2921:   return(0);
2922: }
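
/*
   Usage sketch (not part of the library source): create a draw context that plots the
   solution every 5 iterations and attach it with TaoDrawSolutionMonitor(); passing
   TaoMonitorDrawCtxDestroy as the destroy routine lets Tao clean the context up.
   Error checking is omitted.

     TaoMonitorDrawCtx drawctx;
     TaoMonitorDrawCtxCreate(PETSC_COMM_WORLD,NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,5,&drawctx);
     TaoSetMonitor(tao,TaoDrawSolutionMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
*/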

2924: /*@C
2925:    TaoMonitorDrawCtxDestroy - Destroys the monitor context created with TaoMonitorDrawCtxCreate()

2927:    Collective on Tao

2929:    Input Parameters:
2930: .    ctx - the monitor context

2932:    Level: intermediate

2934: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoDrawSolutionMonitor()
2935: @*/
2936: PetscErrorCode  TaoMonitorDrawCtxDestroy(TaoMonitorDrawCtx *ictx)
2937: {

2941:   PetscViewerDestroy(&(*ictx)->viewer);
2942:   PetscFree(*ictx);
2943:   return(0);
2944: }