Actual source code: taosolver.c

petsc-3.13.6 2020-09-29
  1: #define TAO_DLL

  3:  #include <petsc/private/taoimpl.h>

  5: PetscBool TaoRegisterAllCalled = PETSC_FALSE;
  6: PetscFunctionList TaoList = NULL;

  8: PetscClassId TAO_CLASSID;

 10: PetscLogEvent TAO_Solve;
 11: PetscLogEvent TAO_ObjectiveEval;
 12: PetscLogEvent TAO_GradientEval;
 13: PetscLogEvent TAO_ObjGradEval;
 14: PetscLogEvent TAO_HessianEval;
 15: PetscLogEvent TAO_JacobianEval;
 16: PetscLogEvent TAO_ConstraintsEval;

 18: const char *TaoSubSetTypes[] = {"subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",0};

 20: struct _n_TaoMonitorDrawCtx {
 21:   PetscViewer viewer;
 22:   PetscInt    howoften;  /* when > 0 uses iteration % howoften, when negative only final solution plotted */
 23: };

 25: /*@
 26:   TaoCreate - Creates a TAO solver

 28:   Collective

 30:   Input Parameter:
 31: . comm - MPI communicator

 33:   Output Parameter:
 34: . newtao - the new Tao context

 36:   Available methods include:
 37: +    nls - Newton's method with line search for unconstrained minimization
 38: .    ntr - Newton's method with trust region for unconstrained minimization
 39: .    ntl - Newton's method with trust region, line search for unconstrained minimization
 40: .    lmvm - Limited memory variable metric method for unconstrained minimization
 41: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
 42: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
 43: .    tron - Newton Trust Region method for bound constrained minimization
 44: .    gpcg - Gradient projected conjugate gradient method for quadratic bound constrained minimization
 45: .    blmvm - Limited memory variable metric method for bound constrained minimization
 46: .    lcl - Linearly constrained Lagrangian method for pde-constrained minimization
 47: -    pounders - Model-based algorithm for nonlinear least squares

 49:    Options Database Keys:
 50: .   -tao_type - select which method TAO should use

 52:    Level: beginner

 54: .seealso: TaoSolve(), TaoDestroy()
 55: @*/
 56: PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
 57: {
 59:   Tao            tao;

 63:   *newtao = NULL;

 65:   TaoInitializePackage();
 66:   TaoLineSearchInitializePackage();

 68:   PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);
 69:   tao->ops->computeobjective=0;
 70:   tao->ops->computeobjectiveandgradient=0;
 71:   tao->ops->computegradient=0;
 72:   tao->ops->computehessian=0;
 73:   tao->ops->computeresidual=0;
 74:   tao->ops->computeresidualjacobian=0;
 75:   tao->ops->computeconstraints=0;
 76:   tao->ops->computejacobian=0;
 77:   tao->ops->computejacobianequality=0;
 78:   tao->ops->computejacobianinequality=0;
 79:   tao->ops->computeequalityconstraints=0;
 80:   tao->ops->computeinequalityconstraints=0;
 81:   tao->ops->convergencetest=TaoDefaultConvergenceTest;
 82:   tao->ops->convergencedestroy=0;
 83:   tao->ops->computedual=0;
 84:   tao->ops->setup=0;
 85:   tao->ops->solve=0;
 86:   tao->ops->view=0;
 87:   tao->ops->setfromoptions=0;
 88:   tao->ops->destroy=0;

 90:   tao->solution=NULL;
 91:   tao->gradient=NULL;
 92:   tao->ls_res = NULL;
 93:   tao->ls_jac = NULL;
 94:   tao->constraints=NULL;
 95:   tao->constraints_equality=NULL;
 96:   tao->constraints_inequality=NULL;
 97:   tao->res_weights_v=NULL;
 98:   tao->res_weights_w=NULL;
 99:   tao->stepdirection=NULL;
100:   tao->niter=0;
101:   tao->ntotalits=0;
102:   tao->XL = NULL;
103:   tao->XU = NULL;
104:   tao->IL = NULL;
105:   tao->IU = NULL;
106:   tao->DI = NULL;
107:   tao->DE = NULL;
108:   tao->gradient_norm = NULL;
109:   tao->gradient_norm_tmp = NULL;
110:   tao->hessian = NULL;
111:   tao->hessian_pre = NULL;
112:   tao->jacobian = NULL;
113:   tao->jacobian_pre = NULL;
114:   tao->jacobian_state = NULL;
115:   tao->jacobian_state_pre = NULL;
116:   tao->jacobian_state_inv = NULL;
117:   tao->jacobian_design = NULL;
118:   tao->jacobian_design_pre = NULL;
119:   tao->jacobian_equality = NULL;
120:   tao->jacobian_equality_pre = NULL;
121:   tao->jacobian_inequality = NULL;
122:   tao->jacobian_inequality_pre = NULL;
123:   tao->state_is = NULL;
124:   tao->design_is = NULL;

126:   tao->max_it     = 10000;
127:   tao->max_funcs   = 10000;
128: #if defined(PETSC_USE_REAL_SINGLE)
129:   tao->gatol       = 1e-5;
130:   tao->grtol       = 1e-5;
131:   tao->crtol       = 1e-5;
132:   tao->catol       = 1e-5;
133: #else
134:   tao->gatol       = 1e-8;
135:   tao->grtol       = 1e-8;
136:   tao->crtol       = 1e-8;
137:   tao->catol       = 1e-8;
138: #endif
139:   tao->gttol       = 0.0;
140:   tao->steptol     = 0.0;
141:   tao->trust0      = PETSC_INFINITY;
142:   tao->fmin        = PETSC_NINFINITY;
143:   tao->hist_malloc = PETSC_FALSE;
144:   tao->hist_reset = PETSC_TRUE;
145:   tao->hist_max = 0;
146:   tao->hist_len = 0;
147:   tao->hist_obj = NULL;
148:   tao->hist_resid = NULL;
149:   tao->hist_cnorm = NULL;
150:   tao->hist_lits = NULL;

152:   tao->numbermonitors=0;
153:   tao->viewsolution=PETSC_FALSE;
154:   tao->viewhessian=PETSC_FALSE;
155:   tao->viewgradient=PETSC_FALSE;
156:   tao->viewjacobian=PETSC_FALSE;
157:   tao->viewconstraints = PETSC_FALSE;

159:   tao->bounded = PETSC_FALSE;
160:   tao->constrained = PETSC_FALSE;

162:   tao->header_printed = PETSC_FALSE;

164:   /* These flags prevent algorithms from overriding user options */
165:   tao->max_it_changed   =PETSC_FALSE;
166:   tao->max_funcs_changed=PETSC_FALSE;
167:   tao->gatol_changed    =PETSC_FALSE;
168:   tao->grtol_changed    =PETSC_FALSE;
169:   tao->gttol_changed    =PETSC_FALSE;
170:   tao->steptol_changed  =PETSC_FALSE;
171:   tao->trust0_changed   =PETSC_FALSE;
172:   tao->fmin_changed     =PETSC_FALSE;
173:   tao->catol_changed    =PETSC_FALSE;
174:   tao->crtol_changed    =PETSC_FALSE;
175:   TaoResetStatistics(tao);
176:   *newtao = tao;
177:   return(0);
178: }
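
/* Editorial usage sketch (not part of taosolver.c): the typical life cycle built on the
   routines in this file.  FormFunctionGradient() and user are hypothetical, supplied by the
   application; error checking (ierr/CHKERRQ) is omitted for brevity. */
static PetscErrorCode ExampleTaoDriver(MPI_Comm comm,Vec x,PetscErrorCode (*FormFunctionGradient)(Tao,Vec,PetscReal*,Vec,void*),void *user)
{
  Tao                tao;
  TaoConvergedReason reason;

  TaoCreate(comm,&tao);
  TaoSetType(tao,TAOLMVM);                                   /* any of the methods listed above */
  TaoSetInitialVector(tao,x);
  TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,user);
  TaoSetFromOptions(tao);                                    /* honor -tao_type, -tao_gatol, ... */
  TaoSolve(tao);
  TaoGetConvergedReason(tao,&reason);                        /* check why the solver stopped */
  TaoDestroy(&tao);
  return(0);
}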

180: /*@
181:   TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u

183:   Collective on Tao

185:   Input Parameters:
186: . tao - the Tao context

188:   Notes:
189:   The user must set up the Tao with calls to TaoSetInitialVector(),
190:   TaoSetObjectiveRoutine(),
191:   TaoSetGradientRoutine(), and (if using a second-order method) TaoSetHessianRoutine().

193:   You should call TaoGetConvergedReason() or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or
194:   why it failed.

196:   Level: beginner

198: .seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoGetConvergedReason()
199:  @*/
200: PetscErrorCode TaoSolve(Tao tao)
201: {
202:   PetscErrorCode   ierr;
203:   static PetscBool set = PETSC_FALSE;

207:   PetscCitationsRegister("@TechReport{tao-user-ref,\n"
208:                                 "title   = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
209:                                 "author  = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
210:                                 "Institution = {Argonne National Laboratory},\n"
211:                                 "Year   = 2014,\n"
212:                                 "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
213:                                 "url    = {https://www.mcs.anl.gov/research/projects/tao/}\n}\n",&set);
214:   tao->header_printed = PETSC_FALSE;
215:   TaoSetUp(tao);
216:   TaoResetStatistics(tao);
217:   if (tao->linesearch) {
218:     TaoLineSearchReset(tao->linesearch);
219:   }

221:   PetscLogEventBegin(TAO_Solve,tao,0,0,0);
222:   if (tao->ops->solve){ (*tao->ops->solve)(tao); }
223:   PetscLogEventEnd(TAO_Solve,tao,0,0,0);

225:   VecViewFromOptions(tao->solution,(PetscObject)tao,"-tao_view_solution");

227:   tao->ntotalits += tao->niter;
228:   TaoViewFromOptions(tao,NULL,"-tao_view");

230:   if (tao->printreason) {
231:     if (tao->reason > 0) {
232:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
233:     } else {
234:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
235:     }
236:   }
237:   return(0);
238: }
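
/* Editorial usage sketch (not part of taosolver.c): interrogating the result of TaoSolve()
   via TaoGetSolutionStatus(); the printed summary mirrors what -tao_converged_reason reports.
   Error checking is omitted for brevity. */
static PetscErrorCode ExampleReportSolve(Tao tao)
{
  PetscInt           its;
  PetscReal          f,gnorm,cnorm,xdiff;
  TaoConvergedReason reason;

  TaoSolve(tao);
  TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
  if (reason > 0) {
    PetscPrintf(PetscObjectComm((PetscObject)tao),"Converged: f=%g, ||g||=%g, %D iterations\n",(double)f,(double)gnorm,its);
  } else {
    PetscPrintf(PetscObjectComm((PetscObject)tao),"Did not converge: reason %d after %D iterations\n",(int)reason,its);
  }
  return(0);
}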

240: /*@
241:   TaoSetUp - Sets up the internal data structures for the later use
242:   of a Tao solver

244:   Collective on tao

246:   Input Parameters:
247: . tao - the TAO context

249:   Notes:
250:   The user will not need to explicitly call TaoSetUp(), as it will
251:   automatically be called in TaoSolve().  However, if the user
252:   desires to call it explicitly, it should come after TaoCreate()
253:   and any TaoSetSomething() routines, but before TaoSolve().

255:   Level: advanced

257: .seealso: TaoCreate(), TaoSolve()
258: @*/
259: PetscErrorCode TaoSetUp(Tao tao)
260: {

265:   if (tao->setupcalled) return(0);

267:   if (!tao->solution) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector");
268:   if (tao->ops->setup) {
269:     (*tao->ops->setup)(tao);
270:   }
271:   tao->setupcalled = PETSC_TRUE;
272:   return(0);
273: }

275: /*@
276:   TaoDestroy - Destroys the TAO context that was created with
277:   TaoCreate()

279:   Collective on Tao

281:   Input Parameter:
282: . tao - the Tao context

284:   Level: beginner

286: .seealso: TaoCreate(), TaoSolve()
287: @*/
288: PetscErrorCode TaoDestroy(Tao *tao)
289: {

293:   if (!*tao) return(0);
295:   if (--((PetscObject)*tao)->refct > 0) {*tao=0;return(0);}

297:   if ((*tao)->ops->destroy) {
298:     (*((*tao))->ops->destroy)(*tao);
299:   }
300:   KSPDestroy(&(*tao)->ksp);
301:   TaoLineSearchDestroy(&(*tao)->linesearch);

303:   if ((*tao)->ops->convergencedestroy) {
304:     (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);
305:     if ((*tao)->jacobian_state_inv) {
306:       MatDestroy(&(*tao)->jacobian_state_inv);
307:     }
308:   }
309:   VecDestroy(&(*tao)->solution);
310:   VecDestroy(&(*tao)->gradient);
311:   VecDestroy(&(*tao)->ls_res);

313:   if ((*tao)->gradient_norm) {
314:     PetscObjectDereference((PetscObject)(*tao)->gradient_norm);
315:     VecDestroy(&(*tao)->gradient_norm_tmp);
316:   }

318:   VecDestroy(&(*tao)->XL);
319:   VecDestroy(&(*tao)->XU);
320:   VecDestroy(&(*tao)->IL);
321:   VecDestroy(&(*tao)->IU);
322:   VecDestroy(&(*tao)->DE);
323:   VecDestroy(&(*tao)->DI);
324:   VecDestroy(&(*tao)->constraints_equality);
325:   VecDestroy(&(*tao)->constraints_inequality);
326:   VecDestroy(&(*tao)->stepdirection);
327:   MatDestroy(&(*tao)->hessian_pre);
328:   MatDestroy(&(*tao)->hessian);
329:   MatDestroy(&(*tao)->ls_jac);
330:   MatDestroy(&(*tao)->ls_jac_pre);
331:   MatDestroy(&(*tao)->jacobian_pre);
332:   MatDestroy(&(*tao)->jacobian);
333:   MatDestroy(&(*tao)->jacobian_state_pre);
334:   MatDestroy(&(*tao)->jacobian_state);
335:   MatDestroy(&(*tao)->jacobian_state_inv);
336:   MatDestroy(&(*tao)->jacobian_design);
337:   MatDestroy(&(*tao)->jacobian_equality);
338:   MatDestroy(&(*tao)->jacobian_equality_pre);
339:   MatDestroy(&(*tao)->jacobian_inequality);
340:   MatDestroy(&(*tao)->jacobian_inequality_pre);
341:   ISDestroy(&(*tao)->state_is);
342:   ISDestroy(&(*tao)->design_is);
343:   VecDestroy(&(*tao)->res_weights_v);
344:   TaoCancelMonitors(*tao);
345:   if ((*tao)->hist_malloc) {
346:     PetscFree4((*tao)->hist_obj,(*tao)->hist_resid,(*tao)->hist_cnorm,(*tao)->hist_lits);
347:   }
348:   if ((*tao)->res_weights_n) {
349:     PetscFree((*tao)->res_weights_rows);
350:     PetscFree((*tao)->res_weights_cols);
351:     PetscFree((*tao)->res_weights_w);
352:   }
353:   PetscHeaderDestroy(tao);
354:   return(0);
355: }

357: /*@
358:   TaoSetFromOptions - Sets various Tao parameters from user
359:   options.

361:   Collective on Tao

363:   Input Parameter:
364: . tao - the Tao solver context

366:   Options Database Keys:
367: + -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.)
368: . -tao_gatol <gatol> - absolute error tolerance for ||gradient||
369: . -tao_grtol <grtol> - relative error tolerance for ||gradient||
370: . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
371: . -tao_max_it <max> - sets maximum number of iterations
372: . -tao_max_funcs <max> - sets maximum number of function evaluations
373: . -tao_fmin <fmin> - stop if function value reaches fmin
374: . -tao_steptol <tol> - stop if trust region radius less than <tol>
375: . -tao_trust0 <t> - initial trust region radius
376: . -tao_monitor - prints function value and residual at each iteration
377: . -tao_smonitor - same as -tao_monitor, but truncates very small values
378: . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
379: . -tao_view_solution - prints solution vector at each iteration
380: . -tao_view_ls_residual - prints least-squares residual vector at each iteration
381: . -tao_view_step - prints step direction vector at each iteration
382: . -tao_view_gradient - prints gradient vector at each iteration
383: . -tao_draw_solution - graphically view solution vector at each iteration
384: . -tao_draw_step - graphically view step vector at each iteration
385: . -tao_draw_gradient - graphically view gradient at each iteration
386: . -tao_fd_gradient - use gradient computed with finite differences
387: . -tao_fd_hessian - use hessian computed with finite differences
388: . -tao_mf_hessian - use matrix-free hessian computed with finite differences
389: . -tao_cancelmonitors - cancels all monitors (except those set with command line)
390: . -tao_view - prints information about the Tao after solving
391: - -tao_converged_reason - prints the reason TAO stopped iterating

393:   Notes:
394:   To see all options, run your program with the -help option or consult the
395:   user's manual. This routine should be called after TaoCreate() but before TaoSolve().

397:   Level: beginner
398: @*/
399: PetscErrorCode TaoSetFromOptions(Tao tao)
400: {
402:   TaoType        default_type = TAOLMVM;
403:   char           type[256], monfilename[PETSC_MAX_PATH_LEN];
404:   PetscViewer    monviewer;
405:   PetscBool      flg;
406:   MPI_Comm       comm;

410:   PetscObjectGetComm((PetscObject)tao,&comm);

412:   /* So no warnings are given about unused options */
413:   PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);

415:   PetscObjectOptionsBegin((PetscObject)tao);
416:   {
417:     TaoRegisterAll();
418:     if (((PetscObject)tao)->type_name) {
419:       default_type = ((PetscObject)tao)->type_name;
420:     }
421:     /* Check for type from options */
422:     PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);
423:     if (flg) {
424:       TaoSetType(tao,type);
425:     } else if (!((PetscObject)tao)->type_name) {
426:       TaoSetType(tao,default_type);
427:     }

429:     PetscOptionsReal("-tao_catol","Stop if constraints violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);
430:     if (flg) tao->catol_changed=PETSC_TRUE;
431:     PetscOptionsReal("-tao_crtol","Stop if relative constraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);
432:     if (flg) tao->crtol_changed=PETSC_TRUE;
433:     PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);
434:     if (flg) tao->gatol_changed=PETSC_TRUE;
435:     PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);
436:     if (flg) tao->grtol_changed=PETSC_TRUE;
437:     PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);
438:     if (flg) tao->gttol_changed=PETSC_TRUE;
439:     PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);
440:     if (flg) tao->max_it_changed=PETSC_TRUE;
441:     PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);
442:     if (flg) tao->max_funcs_changed=PETSC_TRUE;
443:     PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);
444:     if (flg) tao->fmin_changed=PETSC_TRUE;
445:     PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);
446:     if (flg) tao->steptol_changed=PETSC_TRUE;
447:     PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);
448:     if (flg) tao->trust0_changed=PETSC_TRUE;
449:     PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
450:     if (flg) {
451:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
452:       TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
453:     }

455:     PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);
456:     PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
457:     if (flg) {
458:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
459:       TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
460:     }

462:     PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
463:     if (flg) {
464:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
465:       TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
466:     }

468:     PetscOptionsString("-tao_view_residual","view least-squares residual vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
469:     if (flg) {
470:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
471:       TaoSetMonitor(tao,TaoResidualMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
472:     }

474:     PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
475:     if (flg) {
476:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
477:       TaoSetMonitor(tao,TaoMonitorDefault,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
478:     }

480:     PetscOptionsString("-tao_gmonitor","Use the convergence monitor with extra globalization info","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
481:     if (flg) {
482:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
483:       TaoSetMonitor(tao,TaoDefaultGMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
484:     }

486:     PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
487:     if (flg) {
488:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
489:       TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
490:     }

492:     PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
493:     if (flg) {
494:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
495:       TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
496:     }


499:     flg = PETSC_FALSE;
500:     PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);
501:     if (flg) {TaoCancelMonitors(tao);}

503:     flg = PETSC_FALSE;
504:     PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);
505:     if (flg) {
506:       TaoMonitorDrawCtx drawctx;
507:       PetscInt          howoften = 1;
508:       TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
509:       TaoSetMonitor(tao,TaoDrawSolutionMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
510:     }

512:     flg = PETSC_FALSE;
513:     PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);
514:     if (flg) {
515:       TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);
516:     }

518:     flg = PETSC_FALSE;
519:     PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);
520:     if (flg) {
521:       TaoMonitorDrawCtx drawctx;
522:       PetscInt          howoften = 1;
523:       TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
524:       TaoSetMonitor(tao,TaoDrawGradientMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
525:     }
526:     flg = PETSC_FALSE;
527:     PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);
528:     if (flg) {
529:       TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);
530:     }
531:     flg = PETSC_FALSE;
532:     PetscOptionsBool("-tao_fd_hessian","compute hessian using finite differences","TaoDefaultComputeHessian",flg,&flg,NULL);
533:     if (flg) {
534:       Mat H;

536:       MatCreate(PetscObjectComm((PetscObject)tao),&H);
537:       MatSetType(H,MATAIJ);
538:       TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessian,NULL);
539:       MatDestroy(&H);
540:     }
541:     flg = PETSC_FALSE;
542:     PetscOptionsBool("-tao_mf_hessian","compute matrix-free hessian using finite differences","TaoDefaultComputeHessianMFFD",flg,&flg,NULL);
543:     if (flg) {
544:       Mat H;

546:       MatCreate(PetscObjectComm((PetscObject)tao),&H);
547:       TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessianMFFD,NULL);
548:       MatDestroy(&H);
549:     }
550:     PetscOptionsEnum("-tao_subset_type","subset type","",TaoSubSetTypes,(PetscEnum)tao->subset_type,(PetscEnum*)&tao->subset_type,NULL);

552:     if (tao->ops->setfromoptions) {
553:       (*tao->ops->setfromoptions)(PetscOptionsObject,tao);
554:     }
555:   }
556:   PetscOptionsEnd();
557:   return(0);
558: }
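
/* Editorial usage sketch (not part of taosolver.c): options parsed above can also be injected
   programmatically before TaoSetFromOptions() is called; this is equivalent to passing
   -tao_type ntr -tao_monitor -tao_gatol 1.e-6 on the command line. */
static PetscErrorCode ExampleSetOptions(Tao tao)
{
  PetscOptionsSetValue(NULL,"-tao_type","ntr");
  PetscOptionsSetValue(NULL,"-tao_monitor",NULL);
  PetscOptionsSetValue(NULL,"-tao_gatol","1.e-6");
  TaoSetFromOptions(tao);
  return(0);
}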

560: /*@C
561:    TaoViewFromOptions - View from Options

563:    Collective on Tao

565:    Input Parameters:
566: +  A - the  Tao context
567: .  obj - Optional object
568: -  name - command line option

570:    Level: intermediate
571: .seealso:  Tao, TaoView, PetscObjectViewFromOptions(), TaoCreate()
572: @*/
573: PetscErrorCode  TaoViewFromOptions(Tao A,PetscObject obj,const char name[])
574: {

579:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
580:   return(0);
581: }

583: /*@C
584:   TaoView - Prints information about the Tao

586:   Collective on Tao

588:   Input Parameters:
589: + tao - the Tao context
590: - viewer - visualization context

592:   Options Database Key:
593: . -tao_view - Calls TaoView() at the end of TaoSolve()

595:   Notes:
596:   The available visualization contexts include
597: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
598: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
599:          output where only the first processor opens
600:          the file.  All other processors send their
601:          data to the first processor to print.

603:   Level: beginner

605: .seealso: PetscViewerASCIIOpen()
606: @*/
607: PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
608: {
609:   PetscErrorCode      ierr;
610:   PetscBool           isascii,isstring;
611:   TaoType             type;

615:   if (!viewer) {
616:     PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);
617:   }

621:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
622:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
623:   if (isascii) {
624:     PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);

626:     if (tao->ops->view) {
627:       PetscViewerASCIIPushTab(viewer);
628:       (*tao->ops->view)(tao,viewer);
629:       PetscViewerASCIIPopTab(viewer);
630:     }
631:     if (tao->linesearch) {
632:       PetscViewerASCIIPushTab(viewer);
633:       TaoLineSearchView(tao->linesearch,viewer);
634:       PetscViewerASCIIPopTab(viewer);
635:     }
636:     if (tao->ksp) {
637:       PetscViewerASCIIPushTab(viewer);
638:       KSPView(tao->ksp,viewer);
639:       PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);
640:       PetscViewerASCIIPopTab(viewer);
641:     }

643:     PetscViewerASCIIPushTab(viewer);

645:     if (tao->XL || tao->XU) {
646:       PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);
647:     }

649:     PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);
650:     PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);
651:     PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);
652:     PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);

654:     if (tao->constrained){
655:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances:");
656:       ierr=PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);
657:       ierr=PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);
658:       PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);
659:     }

661:     if (tao->trust < tao->steptol){
662:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);
663:       ierr=PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);
664:     }

666:     if (tao->fmin>-1.e25){
667:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);
668:     }
669:     PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);

671:     PetscViewerASCIIPrintf(viewer,"total number of iterations=%D,          ",tao->niter);
672:     PetscViewerASCIIPrintf(viewer,"              (max: %D)\n",tao->max_it);

674:     if (tao->nfuncs>0){
675:       PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);
676:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
677:     }
678:     if (tao->ngrads>0){
679:       PetscViewerASCIIPrintf(viewer,"total number of gradient evaluations=%D,",tao->ngrads);
680:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
681:     }
682:     if (tao->nfuncgrads>0){
683:       PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);
684:       PetscViewerASCIIPrintf(viewer,"    (max: %D)\n",tao->max_funcs);
685:     }
686:     if (tao->nhess>0){
687:       PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);
688:     }
689:     /*  if (tao->linear_its>0){
690:      PetscViewerASCIIPrintf(viewer,"  total Krylov method iterations=%D\n",tao->linear_its);
691:      }*/
692:     if (tao->nconstraints>0){
693:       PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);
694:     }
695:     if (tao->njac>0){
696:       PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);
697:     }

699:     if (tao->reason>0){
700:       PetscViewerASCIIPrintf(viewer,    "Solution converged: ");
701:       switch (tao->reason) {
702:       case TAO_CONVERGED_GATOL:
703:         PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");
704:         break;
705:       case TAO_CONVERGED_GRTOL:
706:         PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");
707:         break;
708:       case TAO_CONVERGED_GTTOL:
709:         PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");
710:         break;
711:       case TAO_CONVERGED_STEPTOL:
712:         PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");
713:         break;
714:       case TAO_CONVERGED_MINF:
715:         PetscViewerASCIIPrintf(viewer," Minf --  f < fmin\n");
716:         break;
717:       case TAO_CONVERGED_USER:
718:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
719:         break;
720:       default:
721:         PetscViewerASCIIPrintf(viewer,"\n");
722:         break;
723:       }

725:     } else {
726:       PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);
727:       switch (tao->reason) {
728:       case TAO_DIVERGED_MAXITS:
729:         PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");
730:         break;
731:       case TAO_DIVERGED_NAN:
732:         PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");
733:         break;
734:       case TAO_DIVERGED_MAXFCN:
735:         PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");
736:         break;
737:       case TAO_DIVERGED_LS_FAILURE:
738:         PetscViewerASCIIPrintf(viewer," Line Search Failure\n");
739:         break;
740:       case TAO_DIVERGED_TR_REDUCTION:
741:         PetscViewerASCIIPrintf(viewer," Trust Region too small\n");
742:         break;
743:       case TAO_DIVERGED_USER:
744:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
745:         break;
746:       default:
747:         PetscViewerASCIIPrintf(viewer,"\n");
748:         break;
749:       }
750:     }
751:     PetscViewerASCIIPopTab(viewer);
752:   } else if (isstring) {
753:     TaoGetType(tao,&type);
754:     PetscViewerStringSPrintf(viewer," %-3.3s",type);
755:   }
756:   return(0);
757: }
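
/* Editorial usage sketch (not part of taosolver.c): TaoView() is usually triggered by -tao_view,
   but it can also be called directly on an ASCII viewer after TaoSolve(). */
static PetscErrorCode ExampleViewTao(Tao tao)
{
  TaoView(tao,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)tao)));
  return(0);
}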

759: /*@
760:   TaoSetTolerances - Sets parameters used in TAO convergence tests

762:   Logically collective on Tao

764:   Input Parameters:
765: + tao - the Tao context
766: . gatol - stop if norm of gradient is less than this
767: . grtol - stop if relative norm of gradient is less than this
768: - gttol - stop if norm of gradient is reduced by this factor

770:   Options Database Keys:
771: + -tao_gatol <gatol> - Sets gatol
772: . -tao_grtol <grtol> - Sets grtol
773: - -tao_gttol <gttol> - Sets gttol

775:   Stopping Criteria:
776: $ ||g(X)||                            <= gatol
777: $ ||g(X)|| / |f(X)|                   <= grtol
778: $ ||g(X)|| / ||g(X0)||                <= gttol

780:   Notes:
781:   Use PETSC_DEFAULT to leave one or more tolerances unchanged.

783:   Level: beginner

785: .seealso: TaoGetTolerances()

787: @*/
788: PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
789: {


795:   if (gatol != PETSC_DEFAULT) {
796:     if (gatol<0) {
797:       PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");
798:     } else {
799:       tao->gatol = PetscMax(0,gatol);
800:       tao->gatol_changed=PETSC_TRUE;
801:     }
802:   }

804:   if (grtol != PETSC_DEFAULT) {
805:     if (grtol<0) {
806:       PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");
807:     } else {
808:       tao->grtol = PetscMax(0,grtol);
809:       tao->grtol_changed=PETSC_TRUE;
810:     }
811:   }

813:   if (gttol != PETSC_DEFAULT) {
814:     if (gttol<0) {
815:       PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");
816:     } else {
817:       tao->gttol = PetscMax(0,gttol);
818:       tao->gttol_changed=PETSC_TRUE;
819:     }
820:   }
821:   return(0);
822: }
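
/* Editorial usage sketch (not part of taosolver.c): tighten only the absolute gradient
   tolerance and leave grtol and gttol unchanged via PETSC_DEFAULT. */
static PetscErrorCode ExampleSetTolerances(Tao tao)
{
  TaoSetTolerances(tao,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT);
  return(0);
}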

824: /*@
825:   TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO  convergence tests

827:   Logically collective on Tao

829:   Input Parameters:
830: + tao - the Tao context
831: . catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be applied
832: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be applied

834:   Options Database Keys:
835: + -tao_catol <catol> - Sets catol
836: - -tao_crtol <crtol> - Sets crtol

838:   Notes:
839:   Use PETSC_DEFAULT to leave any tolerance unchanged.

841:   Level: intermediate

843: .seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()

845: @*/
846: PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
847: {


853:   if (catol != PETSC_DEFAULT) {
854:     if (catol<0) {
855:       PetscInfo(tao,"Tried to set negative catol -- ignored.\n");
856:     } else {
857:       tao->catol = PetscMax(0,catol);
858:       tao->catol_changed=PETSC_TRUE;
859:     }
860:   }

862:   if (crtol != PETSC_DEFAULT) {
863:     if (crtol<0) {
864:       PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");
865:     } else {
866:       tao->crtol = PetscMax(0,crtol);
867:       tao->crtol_changed=PETSC_TRUE;
868:     }
869:   }
870:   return(0);
871: }

873: /*@
874:   TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO  convergence tests

876:   Not Collective

878:   Input Parameter:
879: . tao - the Tao context

881:   Output Parameter:
882: + catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be applied
883: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be applied

885:   Level: intermediate

887: .seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()

889: @*/
890: PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
891: {
894:   if (catol) *catol = tao->catol;
895:   if (crtol) *crtol = tao->crtol;
896:   return(0);
897: }

899: /*@
900:    TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
901:    When an approximate solution with an objective value below this number
902:    has been found, the solver will terminate.

904:    Logically Collective on Tao

906:    Input Parameters:
907: +  tao - the Tao solver context
908: -  fmin - the tolerance

910:    Options Database Keys:
911: .    -tao_fmin <fmin> - sets the minimum function value

913:    Level: intermediate

915: .seealso: TaoSetTolerances()
916: @*/
917: PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin)
918: {
921:   tao->fmin = fmin;
922:   tao->fmin_changed=PETSC_TRUE;
923:   return(0);
924: }

926: /*@
927:    TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
928:    When an approximate solution with an objective value below this number
929:    has been found, the solver will terminate.

931:    Not collective on Tao

933:    Input Parameters:
934: .  tao - the Tao solver context

936:    Output Parameters:
937: .  fmin - the minimum function value

939:    Level: intermediate

941: .seealso: TaoSetFunctionLowerBound()
942: @*/
943: PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin)
944: {
947:   *fmin = tao->fmin;
948:   return(0);
949: }

951: /*@
952:    TaoSetMaximumFunctionEvaluations - Sets a maximum number of
953:    function evaluations.

955:    Logically Collective on Tao

957:    Input Parameters:
958: +  tao - the Tao solver context
959: -  nfcn - the maximum number of function evaluations (>=0)

961:    Options Database Keys:
962: .    -tao_max_funcs <nfcn> - sets the maximum number of function evaluations

964:    Level: intermediate

966: .seealso: TaoSetTolerances(), TaoSetMaximumIterations()
967: @*/

969: PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn)
970: {
973:   tao->max_funcs = PetscMax(0,nfcn);
974:   tao->max_funcs_changed=PETSC_TRUE;
975:   return(0);
976: }

978: /*@
979:    TaoGetMaximumFunctionEvaluations - Gets the maximum number of
980:    function evaluations.

982:    Not Collective

984:    Input Parameters:
985: .  tao - the Tao solver context

987:    Output Parameters:
988: .  nfcn - the maximum number of function evaluations

990:    Level: intermediate

992: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
993: @*/

995: PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn)
996: {
999:   *nfcn = tao->max_funcs;
1000:   return(0);
1001: }

1003: /*@
1004:    TaoGetCurrentFunctionEvaluations - Gets the current number of
1005:    function evaluations.

1007:    Not Collective

1009:    Input Parameters:
1010: .  tao - the Tao solver context

1012:    Output Parameters:
1013: .  nfuncs - the current number of function evaluations

1015:    Level: intermediate

1017: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
1018: @*/

1020: PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs)
1021: {
1024:   *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
1025:   return(0);
1026: }

1028: /*@
1029:    TaoSetMaximumIterations - Sets a maximum number of iterates.

1031:    Logically Collective on Tao

1033:    Input Parameters:
1034: +  tao - the Tao solver context
1035: -  maxits - the maximum number of iterates (>=0)

1037:    Options Database Keys:
1038: .    -tao_max_it <its> - sets the maximum number of iterations

1040:    Level: intermediate

1042: .seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations()
1043: @*/
1044: PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits)
1045: {
1048:   tao->max_it = PetscMax(0,maxits);
1049:   tao->max_it_changed=PETSC_TRUE;
1050:   return(0);
1051: }

1053: /*@
1054:    TaoGetMaximumIterations - Gets the maximum number of iterates.

1056:    Not Collective

1058:    Input Parameters:
1059: .  tao - the Tao solver context

1061:    Output Parameters:
1062: .  maxits - the maximum number of iterates

1064:    Level: intermediate

1066: .seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations()
1067: @*/
1068: PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits)
1069: {
1072:   *maxits = tao->max_it;
1073:   return(0);
1074: }

1076: /*@
1077:    TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.

1079:    Logically collective on Tao

1081:    Input Parameter:
1082: +  tao - a TAO optimization solver
1083: -  radius - the trust region radius

1085:    Level: intermediate

1087:    Options Database Key:
1088: .  -tao_trust0 <t0> - sets initial trust region radius

1090: .seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance()
1091: @*/
1092: PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
1093: {
1096:   tao->trust0 = PetscMax(0.0,radius);
1097:   tao->trust0_changed=PETSC_TRUE;
1098:   return(0);
1099: }

1101: /*@
1102:    TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.

1104:    Not Collective

1106:    Input Parameter:
1107: .  tao - a TAO optimization solver

1109:    Output Parameter:
1110: .  radius - the trust region radius

1112:    Level: intermediate

1114: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius()
1115: @*/
1116: PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius)
1117: {
1120:   *radius = tao->trust0;
1121:   return(0);
1122: }

1124: /*@
1125:    TaoGetCurrentTrustRegionRadius - Gets the current trust region radius.

1127:    Not Collective

1129:    Input Parameter:
1130: .  tao - a TAO optimization solver

1132:    Output Parameter:
1133: .  radius - the trust region radius

1135:    Level: intermediate

1137: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius()
1138: @*/
1139: PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius)
1140: {
1143:   *radius = tao->trust;
1144:   return(0);
1145: }

1147: /*@
1148:   TaoGetTolerances - gets the current values of tolerances

1150:   Not Collective

1152:   Input Parameters:
1153: . tao - the Tao context

1155:   Output Parameters:
1156: + gatol - stop if norm of gradient is less than this
1157: . grtol - stop if relative norm of gradient is less than this
1158: - gttol - stop if norm of gradient is reduced by this factor

1160:   Note: NULL can be used as an argument if not all tolerance values are needed

1162: .seealso: TaoSetTolerances()

1164:   Level: intermediate
1165: @*/
1166: PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol)
1167: {
1170:   if (gatol) *gatol=tao->gatol;
1171:   if (grtol) *grtol=tao->grtol;
1172:   if (gttol) *gttol=tao->gttol;
1173:   return(0);
1174: }

1176: /*@
1177:   TaoGetKSP - Gets the linear solver used by the optimization solver.
1178:   Application writers should use TaoGetKSP if they need direct access
1179:   to the PETSc KSP object.

1181:   Not Collective

1183:    Input Parameters:
1184: .  tao - the TAO solver

1186:    Output Parameters:
1187: .  ksp - the KSP linear solver used in the optimization solver

1189:    Level: intermediate

1191: @*/
1192: PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp)
1193: {
1195:   *ksp = tao->ksp;
1196:   return(0);
1197: }
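
/* Editorial usage sketch (not part of taosolver.c): customizing the inner linear solver of a
   Newton-type method; the returned KSP may be NULL for methods that perform no linear solves. */
static PetscErrorCode ExampleConfigureKSP(Tao tao)
{
  KSP ksp;
  PC  pc;

  TaoGetKSP(tao,&ksp);
  if (ksp) {
    KSPSetType(ksp,KSPCG);
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCJACOBI);
  }
  return(0);
}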

1199: /*@
1200:    TaoGetLinearSolveIterations - Gets the total number of linear iterations
1201:    used by the TAO solver

1203:    Not Collective

1205:    Input Parameter:
1206: .  tao - TAO context

1208:    Output Parameter:
1209: .  lits - number of linear iterations

1211:    Notes:
1212:    This counter is reset to zero for each successive call to TaoSolve()

1214:    Level: intermediate

1216: .seealso:  TaoGetKSP()
1217: @*/
1218: PetscErrorCode  TaoGetLinearSolveIterations(Tao tao,PetscInt *lits)
1219: {
1223:   *lits = tao->ksp_tot_its;
1224:   return(0);
1225: }

1227: /*@
1228:   TaoGetLineSearch - Gets the line search used by the optimization solver.
1229:   Application writers should use TaoGetLineSearch if they need direct access
1230:   to the TaoLineSearch object.

1232:   Not Collective

1234:    Input Parameters:
1235: .  tao - the TAO solver

1237:    Output Parameters:
1238: .  ls - the line search used in the optimization solver

1240:    Level: intermediate

1242: @*/
1243: PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls)
1244: {
1246:   *ls = tao->linesearch;
1247:   return(0);
1248: }
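
/* Editorial usage sketch (not part of taosolver.c): switching the line search used by the
   current method; the returned TaoLineSearch may be NULL for methods that do not use one. */
static PetscErrorCode ExampleConfigureLineSearch(Tao tao)
{
  TaoLineSearch ls;

  TaoGetLineSearch(tao,&ls);
  if (ls) {
    TaoLineSearchSetType(ls,TAOLINESEARCHARMIJO);
    TaoLineSearchSetFromOptions(ls);
  }
  return(0);
}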

1250: /*@
1251:   TaoAddLineSearchCounts - Adds the number of function evaluations spent
1252:   in the line search to the running total.

1254:    Input Parameter:
1255: .  tao - the TAO solver

1258:    Level: developer

1260: .seealso: TaoLineSearchApply()
1261: @*/
1262: PetscErrorCode TaoAddLineSearchCounts(Tao tao)
1263: {
1265:   PetscBool      flg;
1266:   PetscInt       nfeval,ngeval,nfgeval;

1270:   if (tao->linesearch) {
1271:     TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);
1272:     if (!flg) {
1273:       TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);
1274:       tao->nfuncs+=nfeval;
1275:       tao->ngrads+=ngeval;
1276:       tao->nfuncgrads+=nfgeval;
1277:     }
1278:   }
1279:   return(0);
1280: }

1282: /*@
1283:   TaoGetSolutionVector - Returns the vector with the current TAO solution

1285:   Not Collective

1287:   Input Parameter:
1288: . tao - the Tao context

1290:   Output Parameter:
1291: . X - the current solution

1293:   Level: intermediate

1295:   Note:  The returned vector will be the same object that was passed into TaoSetInitialVector()
1296: @*/
1297: PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X)
1298: {
1301:   *X = tao->solution;
1302:   return(0);
1303: }

1305: /*@
1306:   TaoGetGradientVector - Returns the vector with the current TAO gradient

1308:   Not Collective

1310:   Input Parameter:
1311: . tao - the Tao context

1313:   Output Parameter:
1314: . G - the current gradient

1316:   Level: intermediate
1317: @*/
1318: PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G)
1319: {
1322:   *G = tao->gradient;
1323:   return(0);
1324: }

1326: /*@
1327:    TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers.
1328:    These statistics include the iteration number, residual norms, and convergence status.
1329:    This routine gets called before solving each optimization problem.

1331:    Collective on Tao

1333:    Input Parameters:
1334: .  tao - the Tao context

1336:    Level: developer

1338: .seealso: TaoCreate(), TaoSolve()
1339: @*/
1340: PetscErrorCode TaoResetStatistics(Tao tao)
1341: {
1344:   tao->niter        = 0;
1345:   tao->nfuncs       = 0;
1346:   tao->nfuncgrads   = 0;
1347:   tao->ngrads       = 0;
1348:   tao->nhess        = 0;
1349:   tao->njac         = 0;
1350:   tao->nconstraints = 0;
1351:   tao->ksp_its      = 0;
1352:   tao->ksp_tot_its  = 0;
1353:   tao->reason       = TAO_CONTINUE_ITERATING;
1354:   tao->residual     = 0.0;
1355:   tao->cnorm        = 0.0;
1356:   tao->step         = 0.0;
1357:   tao->lsflag       = PETSC_FALSE;
1358:   if (tao->hist_reset) tao->hist_len=0;
1359:   return(0);
1360: }

1362: /*@C
1363:   TaoSetUpdate - Sets the general-purpose update function called
1364:   at the beginning of every iteration of the nonlinear solve. Specifically
1365:   it is called at the top of every iteration, after the new solution and the gradient
1366:   are determined, but before the Hessian is computed (if applicable).

1368:   Logically Collective on Tao

1370:   Input Parameters:
1371: + tao - The Tao solver context
1372: - func - The update function; the user context ctx passed to TaoSetUpdate() is supplied as the final argument of func

1374:   Calling sequence of func:
1375: $ func(Tao tao, PetscInt step, void *ctx);

1377: . step - The current step of the iteration

1379:   Level: advanced

1381: .seealso: TaoSolve()
1382: @*/
1383: PetscErrorCode  TaoSetUpdate(Tao tao, PetscErrorCode (*func)(Tao, PetscInt,void*), void *ctx)
1384: {
1387:   tao->ops->update = func;
1388:   tao->user_update = ctx;
1389:   return(0);
1390: }
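
/* Editorial usage sketch (not part of taosolver.c): an update callback matching the
   (Tao,PetscInt,void*) signature expected by TaoSetUpdate(); ExampleUpdateCtx and
   ExampleUpdate are hypothetical names.  Register with
   TaoSetUpdate(tao,ExampleUpdate,&updatectx); */
typedef struct { PetscInt ncalls; } ExampleUpdateCtx;

static PetscErrorCode ExampleUpdate(Tao tao,PetscInt step,void *ctx)
{
  ExampleUpdateCtx *user = (ExampleUpdateCtx*)ctx;

  user->ncalls++;     /* e.g. refresh problem data that depends on the current iterate */
  return(0);
}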

1392: /*@C
1393:   TaoSetConvergenceTest - Sets the function that is to be used to test
1394:   for convergence of the iterative minimization solution.  The new convergence
1395:   testing routine will replace TAO's default convergence test.

1397:   Logically Collective on Tao

1399:   Input Parameters:
1400: + tao - the Tao object
1401: . conv - the routine to test for convergence
1402: - ctx - [optional] context for private data for the convergence routine
1403:         (may be NULL)

1405:   Calling sequence of conv:
1406: $   PetscErrorCode conv(Tao tao, void *ctx)

1408: + tao - the Tao object
1409: - ctx - [optional] convergence context

1411:   Note: The new convergence testing routine should call TaoSetConvergedReason().

1413:   Level: advanced

1415: .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor()

1417: @*/
1418: PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx)
1419: {
1422:   (tao)->ops->convergencetest = conv;
1423:   (tao)->cnvP = ctx;
1424:   return(0);
1425: }
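
/* Editorial usage sketch (not part of taosolver.c): a user convergence test that stops once
   the objective falls below a target value and otherwise defers to the default test.
   Register with TaoSetConvergenceTest(tao,ExampleConvergenceTest,&ftarget); */
static PetscErrorCode ExampleConvergenceTest(Tao tao,void *ctx)
{
  PetscReal          *ftarget = (PetscReal*)ctx;
  PetscInt           its;
  PetscReal          f,gnorm,cnorm,xdiff;
  TaoConvergedReason reason;

  TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
  if (f < *ftarget) {
    TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
  } else {
    TaoDefaultConvergenceTest(tao,NULL);
  }
  return(0);
}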

1427: /*@C
1428:    TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every
1429:    iteration of the solver to display the iteration's
1430:    progress.

1432:    Logically Collective on Tao

1434:    Input Parameters:
1435: +  tao - the Tao solver context
1436: .  mymonitor - monitoring routine
1437: -  mctx - [optional] user-defined context for private data for the
1438:           monitor routine (may be NULL); the dest argument optionally supplies a routine to destroy mctx when the Tao is destroyed

1440:    Calling sequence of mymonitor:
1441: $     PetscErrorCode mymonitor(Tao tao,void *mctx)

1443: +    tao - the Tao solver context
1444: -    mctx - [optional] monitoring context


1447:    Options Database Keys:
1448: +    -tao_monitor        - sets TaoMonitorDefault()
1449: .    -tao_smonitor       - sets short monitor
1450: .    -tao_cmonitor       - same as smonitor plus constraint norm
1451: .    -tao_view_solution   - view solution at each iteration
1452: .    -tao_view_gradient   - view gradient at each iteration
1453: .    -tao_view_ls_residual - view least-squares residual vector at each iteration
1454: -    -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database.


1457:    Notes:
1458:    Several different monitoring routines may be set by calling
1459:    TaoSetMonitor() multiple times; all will be called in the
1460:    order in which they were set.

1462:    Fortran Notes:
1463:     Only one monitor function may be set

1465:    Level: intermediate

1467: .seealso: TaoMonitorDefault(), TaoCancelMonitors(),  TaoSetDestroyRoutine()
1468: @*/
1469: PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**))
1470: {
1472:   PetscInt       i;
1473:   PetscBool      identical;

1477:   if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PetscObjectComm((PetscObject)tao),1,"Cannot attach another monitor -- max=%d",MAXTAOMONITORS);

1479:   for (i=0; i<tao->numbermonitors;i++) {
1480:     PetscMonitorCompare((PetscErrorCode (*)(void))func,ctx,dest,(PetscErrorCode (*)(void))tao->monitor[i],tao->monitorcontext[i],tao->monitordestroy[i],&identical);
1481:     if (identical) return(0);
1482:   }
1483:   tao->monitor[tao->numbermonitors] = func;
1484:   tao->monitorcontext[tao->numbermonitors] = (void*)ctx;
1485:   tao->monitordestroy[tao->numbermonitors] = dest;
1486:   ++tao->numbermonitors;
1487:   return(0);
1488: }
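
/* Editorial usage sketch (not part of taosolver.c): a minimal user monitor printing the same
   quantities as TaoMonitorDefault(), registered without a private context or destroy routine:
   TaoSetMonitor(tao,ExampleMonitor,NULL,NULL); */
static PetscErrorCode ExampleMonitor(Tao tao,void *ctx)
{
  PetscInt           its;
  PetscReal          f,gnorm,cnorm,xdiff;
  TaoConvergedReason reason;

  TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
  PetscPrintf(PetscObjectComm((PetscObject)tao),"%3D TAO: f=%g, ||g||=%g\n",its,(double)f,(double)gnorm);
  return(0);
}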

1490: /*@
1491:    TaoCancelMonitors - Clears all the monitor functions for a Tao object.

1493:    Logically Collective on Tao

1495:    Input Parameters:
1496: .  tao - the Tao solver context

1498:    Options Database:
1499: .  -tao_cancelmonitors - cancels all monitors that have been hardwired
1500:     into a code by calls to TaoSetMonitor(), but does not cancel those
1501:     set via the options database

1503:    Notes:
1504:    There is no way to clear one specific monitor from a Tao object.

1506:    Level: advanced

1508: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1509: @*/
1510: PetscErrorCode TaoCancelMonitors(Tao tao)
1511: {
1512:   PetscInt       i;

1517:   for (i=0;i<tao->numbermonitors;i++) {
1518:     if (tao->monitordestroy[i]) {
1519:       (*tao->monitordestroy[i])(&tao->monitorcontext[i]);
1520:     }
1521:   }
1522:   tao->numbermonitors=0;
1523:   return(0);
1524: }

1526: /*@
1527:    TaoMonitorDefault - Default routine for monitoring progress of the
1528:    Tao solvers.  This monitor prints the function value and gradient
1529:    norm at each iteration.  It can be turned on from the command line using the
1530:    -tao_monitor option

1532:    Collective on Tao

1534:    Input Parameters:
1535: +  tao - the Tao context
1536: -  ctx - PetscViewer context or NULL

1538:    Options Database Keys:
1539: .  -tao_monitor

1541:    Level: advanced

1543: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1544: @*/
1545: PetscErrorCode TaoMonitorDefault(Tao tao, void *ctx)
1546: {
1548:   PetscInt       its, tabs;
1549:   PetscReal      fct,gnorm;
1550:   PetscViewer    viewer = (PetscViewer)ctx;

1554:   its=tao->niter;
1555:   fct=tao->fc;
1556:   gnorm=tao->residual;
1557:   PetscViewerASCIIGetTab(viewer, &tabs);
1558:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1559:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1560:      PetscViewerASCIIPrintf(viewer,"  Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1561:      tao->header_printed = PETSC_TRUE;
1562:    }
1563:   ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1564:   ierr=PetscViewerASCIIPrintf(viewer,"  Function value: %g,",(double)fct);
1565:   if (gnorm >= PETSC_INFINITY) {
1566:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf \n");
1567:   } else {
1568:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g \n",(double)gnorm);
1569:   }
1570:   PetscViewerASCIISetTab(viewer, tabs);
1571:   return(0);
1572: }

1574: /*@
1575:    TaoDefaultGMonitor - Default routine for monitoring progress of the
1576:    Tao solvers, with extra detail on the globalization method.
1577:    This monitor prints the function value and gradient norm at each
1578:    iteration, as well as the step size and trust radius. Note that the
1579:    step size and trust radius may be the same for some algorithms.
1580:    It can be turned on from the command line using the
1581:    -tao_gmonitor option

1583:    Collective on Tao

1585:    Input Parameters:
1586: +  tao - the Tao context
1587: -  ctx - PetscViewer context or NULL

1589:    Options Database Keys:
1590: .  -tao_gmonitor

1592:    Level: advanced

1594: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1595: @*/
1596: PetscErrorCode TaoDefaultGMonitor(Tao tao, void *ctx)
1597: {
1599:   PetscInt       its, tabs;
1600:   PetscReal      fct,gnorm,stp,tr;
1601:   PetscViewer    viewer = (PetscViewer)ctx;

1605:   its=tao->niter;
1606:   fct=tao->fc;
1607:   gnorm=tao->residual;
1608:   stp=tao->step;
1609:   tr=tao->trust;
1610:   PetscViewerASCIIGetTab(viewer, &tabs);
1611:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1612:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1613:      PetscViewerASCIIPrintf(viewer,"  Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1614:      tao->header_printed = PETSC_TRUE;
1615:    }
1616:   ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1617:   ierr=PetscViewerASCIIPrintf(viewer,"  Function value: %g,",(double)fct);
1618:   if (gnorm >= PETSC_INFINITY) {
1619:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf,");
1620:   } else {
1621:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g,",(double)gnorm);
1622:   }
1623:   PetscViewerASCIIPrintf(viewer,"  Step: %g,  Trust: %g\n",(double)stp,(double)tr);
1624:   PetscViewerASCIISetTab(viewer, tabs);
1625:   return(0);
1626: }

1628: /*@
1629:    TaoDefaultSMonitor - Default routine for monitoring progress of the
1630:    solver. Same as TaoMonitorDefault() except
1631:    it prints fewer digits of the residual as the residual gets smaller.
1632:    This is because the later digits are meaningless and are often
1633:    different on different machines; by using this routine different
1634:    machines will usually generate the same output. It can be turned on
1635:    by using the -tao_smonitor option

1637:    Collective on Tao

1639:    Input Parameters:
1640: +  tao - the Tao context
1641: -  ctx - PetscViewer context of type ASCII

1643:    Options Database Keys:
1644: .  -tao_smonitor

1646:    Level: advanced

1648: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1649: @*/
1650: PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
1651: {
1653:   PetscInt       its, tabs;
1654:   PetscReal      fct,gnorm;
1655:   PetscViewer    viewer = (PetscViewer)ctx;

1659:   its=tao->niter;
1660:   fct=tao->fc;
1661:   gnorm=tao->residual;
1662:   PetscViewerASCIIGetTab(viewer, &tabs);
1663:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1664:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);
1665:   ierr=PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);
1666:   if (gnorm >= PETSC_INFINITY) {
1667:     ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");
1668:   } else if (gnorm > 1.e-6) {
1669:     ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);
1670:   } else if (gnorm > 1.e-11) {
1671:     ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");
1672:   } else {
1673:     ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");
1674:   }
1675:   PetscViewerASCIISetTab(viewer, tabs);
1676:   return(0);
1677: }

1679: /*@
1680:    TaoDefaultCMonitor - same as TaoMonitorDefault() except
1681:    it prints the norm of the constraints function. It can be turned on
1682:    from the command line using the -tao_cmonitor option

1684:    Collective on Tao

1686:    Input Parameters:
1687: +  tao - the Tao context
1688: -  ctx - PetscViewer context or NULL

1690:    Options Database Keys:
1691: .  -tao_cmonitor

1693:    Level: advanced

1695: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1696: @*/
1697: PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx)
1698: {
1700:   PetscInt       its, tabs;
1701:   PetscReal      fct,gnorm;
1702:   PetscViewer    viewer = (PetscViewer)ctx;

1706:   its=tao->niter;
1707:   fct=tao->fc;
1708:   gnorm=tao->residual;
1709:   PetscViewerASCIIGetTab(viewer, &tabs);
1710:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1711:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %D,",its);
1712:   ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1713:   ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g ",(double)gnorm);
1714:   PetscViewerASCIIPrintf(viewer,"  Constraint: %g \n",(double)tao->cnorm);
1715:   PetscViewerASCIISetTab(viewer, tabs);
1716:   return(0);
1717: }

1719: /*@C
1720:    TaoSolutionMonitor - Views the solution at each iteration.
1721:    It can be turned on from the command line using the
1722:    -tao_view_solution option

1724:    Collective on Tao

1726:    Input Parameters:
1727: +  tao - the Tao context
1728: -  ctx - PetscViewer context or NULL

1730:    Options Database Keys:
1731: .  -tao_view_solution

1733:    Level: advanced

1735: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1736: @*/
1737: PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx)
1738: {
1740:   PetscViewer    viewer  = (PetscViewer)ctx;

1744:   VecView(tao->solution, viewer);
1745:   return(0);
1746: }

1748: /*@C
1749:    TaoGradientMonitor - Views the gradient at each iteration.
1750:    It can be turned on from the command line using the
1751:    -tao_view_gradient option

1753:    Collective on Tao

1755:    Input Parameters:
1756: +  tao - the Tao context
1757: -  ctx - PetscViewer context or NULL

1759:    Options Database Keys:
1760: .  -tao_view_gradient

1762:    Level: advanced

1764: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1765: @*/
1766: PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx)
1767: {
1769:   PetscViewer    viewer = (PetscViewer)ctx;

1773:   VecView(tao->gradient, viewer);
1774:   return(0);
1775: }

1777: /*@C
1778:    TaoStepDirectionMonitor - Views the step direction at each iteration.
1779:    It can be turned on from the command line using the
1780:    -tao_view_stepdirection option

1782:    Collective on Tao

1784:    Input Parameters:
1785: +  tao - the Tao context
1786: -  ctx - PetscViewer context or NULL

1788:    Options Database Keys:
1789: .  -tao_view_stepdirection

1791:    Level: advanced

1793: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1794: @*/
1795: PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx)
1796: {
1798:   PetscViewer    viewer = (PetscViewer)ctx;

1802:   VecView(tao->stepdirection, viewer);
1803:   return(0);
1804: }

1806: /*@C
1807:    TaoDrawSolutionMonitor - Plots the solution at each iteration.
1808:    It can be turned on from the command line using the
1809:    -tao_draw_solution option

1811:    Collective on Tao

1813:    Input Parameters:
1814: +  tao - the Tao context
1815: -  ctx - TaoMonitorDraw context

1817:    Options Database Keys:
1818: .  -tao_draw_solution

1820:    Level: advanced

1822: .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor()
1823: @*/
1824: PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx)
1825: {
1826:   PetscErrorCode    ierr;
1827:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1830:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1831:   VecView(tao->solution,ictx->viewer);
1832:   return(0);
1833: }

1835: /*@C
1836:    TaoDrawGradientMonitor - Plots the gradient at each iteration.
1837:    It can be turned on from the command line using the
1838:    -tao_draw_gradient option

1840:    Collective on Tao

1842:    Input Parameters:
1843: +  tao - the Tao context
1844: -  ctx - PetscViewer context

1846:    Options Database Keys:
1847: .  -tao_draw_gradient

1849:    Level: advanced

1851: .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor()
1852: @*/
1853: PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx)
1854: {
1855:   PetscErrorCode    ierr;
1856:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1859:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1860:   VecView(tao->gradient,ictx->viewer);
1861:   return(0);
1862: }

1864: /*@C
1865:    TaoDrawStepMonitor - Plots the step direction at each iteration.
1866:    It can be turned on from the command line using the
1867:    -tao_draw_step option

1869:    Collective on Tao

1871:    Input Parameters:
1872: +  tao - the Tao context
1873: -  ctx - PetscViewer context

1875:    Options Database Keys:
1876: .  -tao_draw_step

1878:    Level: advanced

1880: .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor()
1881: @*/
1882: PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx)
1883: {
1885:   PetscViewer    viewer = (PetscViewer)(ctx);

1888:   VecView(tao->stepdirection, viewer);
1889:   return(0);
1890: }

1892: /*@C
1893:    TaoResidualMonitor - Views the least-squares residual at each iteration.
1894:    It can be turned on from the command line using the
1895:    -tao_view_ls_residual option

1897:    Collective on Tao

1899:    Input Parameters:
1900: +  tao - the Tao context
1901: -  ctx - PetscViewer context or NULL

1903:    Options Database Keys:
1904: .  -tao_view_ls_residual

1906:    Level: advanced

1908: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1909: @*/
1910: PetscErrorCode TaoResidualMonitor(Tao tao, void *ctx)
1911: {
1913:   PetscViewer    viewer  = (PetscViewer)ctx;

1917:   VecView(tao->ls_res,viewer);
1918:   return(0);
1919: }

1921: /*@
1922:    TaoDefaultConvergenceTest - Determines whether the solver should continue iterating
1923:    or terminate.

1925:    Collective on Tao

1927:    Input Parameters:
1928: +  tao - the Tao context
1929: -  dummy - unused dummy context

1934:    Notes:
1935:    This routine checks the residual in the optimality conditions, the
1936:    relative residual in the optimality conditions, the number of function
1937:    evaluations, and the function value to test convergence.  Some
1938:    solvers may use different convergence routines.  The resulting reason is
   stored in the Tao object and can be retrieved with TaoGetConvergedReason().

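   For illustration, this default test can be (re)installed explicitly with
.vb
      TaoSetConvergenceTest(tao,TaoDefaultConvergenceTest,NULL);
.ve
   where `tao` is an existing Tao object; a user-defined test with the same calling sequence can be
   supplied instead.
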
1940:    Level: developer

1942: .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason()
1943: @*/

1945: PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy)
1946: {
1947:   PetscInt           niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
1948:   PetscInt           max_funcs=tao->max_funcs;
1949:   PetscReal          gnorm=tao->residual, gnorm0=tao->gnorm0;
1950:   PetscReal          f=tao->fc, steptol=tao->steptol,trradius=tao->step;
1951:   PetscReal          gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol;
1952:   PetscReal          catol=tao->catol,crtol=tao->crtol;
1953:   PetscReal          fmin=tao->fmin, cnorm=tao->cnorm;
1954:   TaoConvergedReason reason=tao->reason;
1955:   PetscErrorCode     ierr;

1959:   if (reason != TAO_CONTINUE_ITERATING) {
1960:     return(0);
1961:   }

1963:   if (PetscIsInfOrNanReal(f)) {
1964:     PetscInfo(tao,"Failed to converge: function value is Inf or NaN\n");
1965:     reason = TAO_DIVERGED_NAN;
1966:   } else if (f <= fmin && cnorm <=catol) {
1967:     PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);
1968:     reason = TAO_CONVERGED_MINF;
1969:   } else if (gnorm<= gatol && cnorm <=catol) {
1970:     PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);
1971:     reason = TAO_CONVERGED_GATOL;
1972:   } else if ( f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) {
1973:     PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);
1974:     reason = TAO_CONVERGED_GRTOL;
1975:   } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) {
1976:     PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);
1977:     reason = TAO_CONVERGED_GTTOL;
1978:   } else if (nfuncs > max_funcs){
1979:     PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);
1980:     reason = TAO_DIVERGED_MAXFCN;
1981:   } else if ( tao->lsflag != 0 ){
1982:     PetscInfo(tao,"Tao Line Search failure.\n");
1983:     reason = TAO_DIVERGED_LS_FAILURE;
1984:   } else if (trradius < steptol && niter > 0){
1985:     PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);
1986:     reason = TAO_CONVERGED_STEPTOL;
1987:   } else if (niter >= tao->max_it) {
1988:     PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);
1989:     reason = TAO_DIVERGED_MAXITS;
1990:   } else {
1991:     reason = TAO_CONTINUE_ITERATING;
1992:   }
1993:   tao->reason = reason;
1994:   return(0);
1995: }

1997: /*@C
1998:    TaoSetOptionsPrefix - Sets the prefix used for searching for all
1999:    TAO options in the database.


2002:    Logically Collective on Tao

2004:    Input Parameters:
2005: +  tao - the Tao context
2006: -  prefix - the prefix string to prepend to all TAO option requests

2008:    Notes:
2009:    A hyphen (-) must NOT be given at the beginning of the prefix name.
2010:    The first character of all runtime options is AUTOMATICALLY the hyphen.

2012:    For example, to distinguish between the runtime options for two
2013:    different TAO solvers, one could call
2014: .vb
2015:       TaoSetOptionsPrefix(tao1,"sys1_")
2016:       TaoSetOptionsPrefix(tao2,"sys2_")
2017: .ve

2019:    This would enable use of different options for each system, such as
2020: .vb
2021:       -sys1_tao_method blmvm -sys1_tao_gtol 1.e-3
2022:       -sys2_tao_method lmvm  -sys2_tao_gtol 1.e-4
2023: .ve


2026:    Level: advanced

2028: .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix()
2029: @*/

2031: PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[])
2032: {

2036:   PetscObjectSetOptionsPrefix((PetscObject)tao,p);
2037:   if (tao->linesearch) {
2038:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
2039:   }
2040:   if (tao->ksp) {
2041:     KSPSetOptionsPrefix(tao->ksp,p);
2042:   }
2043:   return(0);
2044: }

2046: /*@C
2047:    TaoAppendOptionsPrefix - Appends to the prefix used for searching for all
2048:    TAO options in the database.


2051:    Logically Collective on Tao

2053:    Input Parameters:
2054: +  tao - the Tao solver context
2055: -  prefix - the prefix string to prepend to all TAO option requests

2057:    Notes:
2058:    A hyphen (-) must NOT be given at the beginning of the prefix name.
2059:    The first character of all runtime options is AUTOMATICALLY the hyphen.


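   For example (the prefixes here are only illustrative), a set followed by an append makes the
   options use the combined prefix -sys1_inner_:
.vb
      TaoSetOptionsPrefix(tao,"sys1_");
      TaoAppendOptionsPrefix(tao,"inner_");
.ve
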
2062:    Level: advanced

2064: .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix()
2065: @*/
2066: PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[])
2067: {

2071:   PetscObjectAppendOptionsPrefix((PetscObject)tao,p);
2072:   if (tao->linesearch) {
2073:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
2074:   }
2075:   if (tao->ksp) {
2076:     KSPSetOptionsPrefix(tao->ksp,p);
2077:   }
2078:   return(0);
2079: }

2081: /*@C
2082:   TaoGetOptionsPrefix - Gets the prefix used for searching for all
2083:   TAO options in the database

2085:   Not Collective

2087:   Input Parameters:
2088: . tao - the Tao context

2090:   Output Parameters:
2091: . prefix - pointer to the prefix string used

2093:   Notes:
2094:     On the Fortran side, the user should pass in a string 'prefix' of
2095:   sufficient length to hold the prefix.

2097:   Level: advanced

2099: .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix()
2100: @*/
2101: PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[])
2102: {
2103:    return PetscObjectGetOptionsPrefix((PetscObject)tao,p);
2104: }

2106: /*@C
2107:    TaoSetType - Sets the algorithm used by the Tao solver.

2109:    Collective on Tao

2111:    Input Parameters:
2112: +  tao - the Tao solver context
2113: -  type - a known method

2115:    Options Database Key:
2116: .  -tao_type <type> - Sets the method; use -help for a list
2117:    of available methods (for instance, "-tao_type lmvm" or "-tao_type tron")

2119:    Available methods include:
2120: +    nls - Newton's method with line search for unconstrained minimization
2121: .    ntr - Newton's method with trust region for unconstrained minimization
2122: .    ntl - Newton's method with trust region, line search for unconstrained minimization
2123: .    lmvm - Limited memory variable metric method for unconstrained minimization
2124: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
2125: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
2126: .    tron - Newton Trust Region method for bound constrained minimization
2127: .    gpcg - Newton Trust Region method for quadratic bound constrained minimization
2128: .    blmvm - Limited memory variable metric method for bound constrained minimization
2129: -    pounders - Model-based algorithm for nonlinear least squares

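   For example (illustrative), to select the limited memory variable metric method from the program
   rather than the options database:
.vb
      TaoSetType(tao,TAOLMVM);
.ve
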
2131:   Level: intermediate

2133: .seealso: TaoCreate(), TaoGetType(), TaoType

2135: @*/
2136: PetscErrorCode TaoSetType(Tao tao, TaoType type)
2137: {
2139:   PetscErrorCode (*create_xxx)(Tao);
2140:   PetscBool      issame;


2145:   PetscObjectTypeCompare((PetscObject)tao,type,&issame);
2146:   if (issame) return(0);

2148:   PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);
2149:   if (!create_xxx) SETERRQ1(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type);

2151:   /* Destroy the existing solver information */
2152:   if (tao->ops->destroy) {
2153:     (*tao->ops->destroy)(tao);
2154:   }
2155:   KSPDestroy(&tao->ksp);
2156:   TaoLineSearchDestroy(&tao->linesearch);
2157:   VecDestroy(&tao->gradient);
2158:   VecDestroy(&tao->stepdirection);

2160:   tao->ops->setup = 0;
2161:   tao->ops->solve = 0;
2162:   tao->ops->view  = 0;
2163:   tao->ops->setfromoptions = 0;
2164:   tao->ops->destroy = 0;

2166:   tao->setupcalled = PETSC_FALSE;

2168:   (*create_xxx)(tao);
2169:   PetscObjectChangeTypeName((PetscObject)tao,type);
2170:   return(0);
2171: }

2173: /*MC
2174:    TaoRegister - Adds a method to the TAO package.

2176:    Synopsis:
2177:    TaoRegister(const char sname[],PetscErrorCode (*func)(Tao))

2179:    Not collective

2181:    Input Parameters:
2182: +  sname - name of a new user-defined solver
2183: -  func - routine to Create method context

2185:    Notes:
2186:    TaoRegister() may be called multiple times to add several user-defined solvers.

2188:    Sample usage:
2189: .vb
2190:    TaoRegister("my_solver",MySolverCreate);
2191: .ve

2193:    Then, your solver can be chosen with the procedural interface via
2194: $     TaoSetType(tao,"my_solver")
2195:    or at runtime via the option
2196: $     -tao_type my_solver

2198:    Level: advanced

2200: .seealso: TaoRegisterAll(), TaoRegisterDestroy()
2201: M*/
2202: PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2203: {

2207:   TaoInitializePackage();
2208:   PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);
2209:   return(0);
2210: }

2212: /*@C
2213:    TaoRegisterDestroy - Frees the list of minimization solvers that were
2214:    registered by TaoRegister().

2216:    Not Collective

2218:    Level: advanced

2220: .seealso: TaoRegisterAll(), TaoRegister()
2221: @*/
2222: PetscErrorCode TaoRegisterDestroy(void)
2223: {
2226:   PetscFunctionListDestroy(&TaoList);
2227:   TaoRegisterAllCalled = PETSC_FALSE;
2228:   return(0);
2229: }

2231: /*@
2232:    TaoGetIterationNumber - Gets the number of Tao iterations completed
2233:    at this time.

2235:    Not Collective

2237:    Input Parameter:
2238: .  tao - Tao context

2240:    Output Parameter:
2241: .  iter - iteration number

2243:    Notes:
2244:    For example, during the computation of iteration 2 this would return 1.


2247:    Level: intermediate

2249: .seealso:   TaoGetLinearSolveIterations(), TaoGetResidualNorm(), TaoGetObjective()
2250: @*/
2251: PetscErrorCode  TaoGetIterationNumber(Tao tao,PetscInt *iter)
2252: {
2256:   *iter = tao->niter;
2257:   return(0);
2258: }

2260: /*@
2261:    TaoGetObjective - Gets the current value of the objective function
2262:    at this time.

2264:    Not Collective

2266:    Input Parameter:
2267: .  tao - Tao context

2269:    Output Parameter:
2270: .  value - the current value

2272:    Level: intermediate

2274: .seealso:   TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetResidualNorm()
2275: @*/
2276: PetscErrorCode  TaoGetObjective(Tao tao,PetscReal *value)
2277: {
2281:   *value = tao->fc;
2282:   return(0);
2283: }

2285: /*@
2286:    TaoGetResidualNorm - Gets the current value of the norm of the residual
2287:    at this time.

2289:    Not Collective

2291:    Input Parameter:
2292: .  tao - Tao context

2294:    Output Parameter:
2295: .  value - the current value

2297:    Level: intermediate

2299:    Developer Note: This is the 2-norm of the residual; we cannot use TaoGetGradientNorm() because that has
2300:                    a different meaning. For some reason Tao sometimes calls the gradient the residual.

2302: .seealso:   TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetObjective()
2303: @*/
2304: PetscErrorCode  TaoGetResidualNorm(Tao tao,PetscReal *value)
2305: {
2309:   *value = tao->residual;
2310:   return(0);
2311: }

2313: /*@
2314:    TaoSetIterationNumber - Sets the current iteration number.

2316:    Not Collective

2318:    Input Parameter:
2319: +  tao - Tao context
2320: -  iter - iteration number

2322:    Level: developer

2324: .seealso:   TaoGetLinearSolveIterations()
2325: @*/
2326: PetscErrorCode  TaoSetIterationNumber(Tao tao,PetscInt iter)
2327: {

2332:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2333:   tao->niter = iter;
2334:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2335:   return(0);
2336: }

2338: /*@
2339:    TaoGetTotalIterationNumber - Gets the total number of Tao iterations
2340:    completed. This count accumulates over multiple calls to
2341:    TaoSolve() with the same Tao object.

2343:    Not Collective

2345:    Input Parameter:
2346: .  tao - Tao context

2348:    Output Parameter:
2349: .  iter - iteration number

2351:    Notes:
2352:    The total iteration count is updated after each solve; if a
2353:    TaoSolve() is currently in progress, those iterations are not yet counted.

2355:    Level: intermediate

2357: .seealso:   TaoGetLinearSolveIterations()
2358: @*/
2359: PetscErrorCode  TaoGetTotalIterationNumber(Tao tao,PetscInt *iter)
2360: {
2364:   *iter = tao->ntotalits;
2365:   return(0);
2366: }

2368: /*@
2369:    TaoSetTotalIterationNumber - Sets the current total iteration number.

2371:    Not Collective

2373:    Input Parameter:
2374: +  tao - Tao context
2375: -  iter - iteration number

2377:    Level: developer

2379: .seealso:   TaoGetLinearSolveIterations()
2380: @*/
2381: PetscErrorCode  TaoSetTotalIterationNumber(Tao tao,PetscInt iter)
2382: {

2387:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2388:   tao->ntotalits = iter;
2389:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2390:   return(0);
2391: }

2393: /*@
2394:   TaoSetConvergedReason - Sets the termination flag on a Tao object

2396:   Logically Collective on Tao

2398:   Input Parameters:
2399: + tao - the Tao context
2400: - reason - one of
2401: $     TAO_CONVERGED_GATOL (3),
2402: $     TAO_CONVERGED_GRTOL (4),
2403: $     TAO_CONVERGED_GTTOL (5),
2404: $     TAO_CONVERGED_STEPTOL (6),
2405: $     TAO_CONVERGED_MINF (7),
2406: $     TAO_CONVERGED_USER (8),
2407: $     TAO_DIVERGED_MAXITS (-2),
2408: $     TAO_DIVERGED_NAN (-4),
2409: $     TAO_DIVERGED_MAXFCN (-5),
2410: $     TAO_DIVERGED_LS_FAILURE (-6),
2411: $     TAO_DIVERGED_TR_REDUCTION (-7),
2412: $     TAO_DIVERGED_USER (-8),
2413: $     TAO_CONTINUE_ITERATING (0)

2414:    Level: intermediate

2416: @*/
2417: PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason)
2418: {
2421:   tao->reason = reason;
2422:   return(0);
2423: }

2425: /*@
2426:    TaoGetConvergedReason - Gets the reason the Tao iteration was stopped.

2428:    Not Collective

2430:    Input Parameter:
2431: .  tao - the Tao solver context

2433:    Output Parameter:
2434: .  reason - one of
2435: $  TAO_CONVERGED_GATOL (3)           ||g(X)|| < gatol
2436: $  TAO_CONVERGED_GRTOL (4)           ||g(X)|| / f(X)  < grtol
2437: $  TAO_CONVERGED_GTTOL (5)           ||g(X)|| / ||g(X0)|| < gttol
2438: $  TAO_CONVERGED_STEPTOL (6)         step size small
2439: $  TAO_CONVERGED_MINF (7)            F < F_min
2440: $  TAO_CONVERGED_USER (8)            User defined
2441: $  TAO_DIVERGED_MAXITS (-2)          its > maxits
2442: $  TAO_DIVERGED_NAN (-4)             Numerical problems
2443: $  TAO_DIVERGED_MAXFCN (-5)          fevals > max_funcs
2444: $  TAO_DIVERGED_LS_FAILURE (-6)      line search failure
2445: $  TAO_DIVERGED_TR_REDUCTION (-7)    trust region failure
2446: $  TAO_DIVERGED_USER (-8)            user defined
2447: $  TAO_CONTINUE_ITERATING (0)

2449:    where
2450: +  X - current solution
2451: .  X0 - initial guess
2452: .  f(X) - current function value
2453: .  f(X*) - true solution (estimated)
2454: .  g(X) - current gradient
2455: .  its - current iterate number
2456: .  maxits - maximum number of iterates
2457: .  fevals - number of function evaluations
2458: -  max_funcs - maximum number of function evaluations

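   For example (an illustrative sketch), to check for divergence after TaoSolve():
.vb
      TaoConvergedReason reason;
      TaoGetConvergedReason(tao,&reason);
      if (reason < 0) {
        PetscPrintf(PETSC_COMM_WORLD,"Tao diverged, reason %D\n",(PetscInt)reason);
      }
.ve
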
2460:    Level: intermediate

2462: .seealso: TaoSetConvergenceTest(), TaoSetTolerances()

2464: @*/
2465: PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason)
2466: {
2470:   *reason = tao->reason;
2471:   return(0);
2472: }

2474: /*@
2475:   TaoGetSolutionStatus - Get the current iterate, objective value,
2476:   residual, infeasibility, and termination reason

2478:   Not Collective

2480:    Input Parameters:
2481: .  tao - the Tao context

2483:    Output Parameters:
2484: +  iterate - the current iterate number (>=0)
2485: .  f - the current function value
2486: .  gnorm - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.
2487: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2488: .  xdiff - the step length or trust region radius of the most recent iterate.
2489: -  reason - The termination reason, which can equal TAO_CONTINUE_ITERATING

2491:    Level: intermediate

2493:    Note:
2494:    TAO returns the values set by the solvers in the routine TaoMonitor().

2496:    Note:
2497:    If any of the output arguments are set to NULL, no corresponding value will be returned.

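   For example (illustrative), to query only the iteration count and current objective value:
.vb
      PetscInt  its;
      PetscReal f;
      TaoGetSolutionStatus(tao,&its,&f,NULL,NULL,NULL,NULL);
.ve
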
2499: .seealso: TaoMonitor(), TaoGetConvergedReason()
2500: @*/
2501: PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason)
2502: {
2504:   if (its) *its=tao->niter;
2505:   if (f) *f=tao->fc;
2506:   if (gnorm) *gnorm=tao->residual;
2507:   if (cnorm) *cnorm=tao->cnorm;
2508:   if (reason) *reason=tao->reason;
2509:   if (xdiff) *xdiff=tao->step;
2510:   return(0);
2511: }

2513: /*@C
2514:    TaoGetType - Gets the current Tao algorithm.

2516:    Not Collective

2518:    Input Parameter:
2519: .  tao - the Tao solver context

2521:    Output Parameter:
2522: .  type - Tao method

2524:    Level: intermediate

2526: @*/
2527: PetscErrorCode TaoGetType(Tao tao,TaoType *type)
2528: {
2532:   *type=((PetscObject)tao)->type_name;
2533:   return(0);
2534: }

2536: /*@C
2537:   TaoMonitor - Monitor the solver and the current solution.  This
2538:   routine will record the iteration number and residual statistics,
2539:   and call any monitors specified by the user.

2541:    Input Parameters:
2542: +  tao - the Tao context
2543: .  its - the current iterate number (>=0)
2544: .  f - the current objective function value
2545: .  res - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.  This measure will be recorded and
2546:           used for some termination tests.
2547: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2548: -  steplength - multiple of the step direction added to the previous iterate.

2550:    Notes:
2551:    The termination reason is not returned by this routine; it is stored in the Tao object and can be obtained with TaoGetConvergedReason().

2553:    Options Database Key:
2554: .  -tao_monitor - Use the default monitor, which prints statistics to standard output

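   For illustration, a solver implementation typically ends each outer iteration with a call such as
.vb
      TaoMonitor(tao,iter,f,gnorm,0.0,steplength);
.ve
   where iter, f, gnorm, and steplength are the solver's local variables (placeholders here) and a
   zero constraint norm is passed for unconstrained problems.
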
2556: .seealso: TaoGetConvergedReason(), TaoMonitorDefault(), TaoSetMonitor()

2558:    Level: developer

2560: @*/
2561: PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength)
2562: {
2564:   PetscInt       i;

2568:   tao->fc = f;
2569:   tao->residual = res;
2570:   tao->cnorm = cnorm;
2571:   tao->step = steplength;
2572:   if (!its) {
2573:     tao->cnorm0 = cnorm; tao->gnorm0 = res;
2574:   }
2575:   if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_USER, "User provided compute function generated Inf or NaN");
2576:   for (i=0;i<tao->numbermonitors;i++) {
2577:     (*tao->monitor[i])(tao,tao->monitorcontext[i]);
2578:   }
2579:   return(0);
2580: }

2582: /*@
2583:    TaoSetConvergenceHistory - Sets the array used to hold the convergence history.

2585:    Logically Collective on Tao

2587:    Input Parameters:
2588: +  tao - the Tao solver context
2589: .  obj   - array to hold objective value history
2590: .  resid - array to hold residual history
2591: .  cnorm - array to hold constraint violation history
2592: .  lits - integer array holds the number of linear iterations for each Tao iteration
2593: .  na  - size of obj, resid, cnorm, and lits
2594: -  reset - PETSC_TRUE indicates each new minimization resets the history counter to zero,
2595:            else it continues storing new values for new minimizations after the old ones

2597:    Notes:
2598:    If set, TAO will fill the given arrays with the indicated
2599:    information at each iteration.  If 'obj','resid','cnorm','lits' are
2600:    *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or
2601:    PETSC_DEFAULT) is allocated for the history.
2602:    If not all are NULL, then only the non-NULL information categories
2603:    will be stored, the others will be ignored.

2605:    Any convergence information after iteration number 'na' will not be stored.

2607:    This routine is useful, e.g., when running a code for purposes
2608:    of accurate performance monitoring, when no I/O should be done
2609:    during the section of code that is being timed.

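   For example (illustrative; the array length 100 is arbitrary):
.vb
      PetscReal obj[100],resid[100],cnorm[100];
      PetscInt  lits[100];
      TaoSetConvergenceHistory(tao,obj,resid,cnorm,lits,100,PETSC_TRUE);
      TaoSolve(tao);
.ve
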
2611:    Level: intermediate

2613: .seealso: TaoGetConvergenceHistory()

2615: @*/
2616: PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal obj[], PetscReal resid[], PetscReal cnorm[], PetscInt lits[], PetscInt na,PetscBool reset)
2617: {


2627:   if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000;
2628:   if (!obj && !resid && !cnorm && !lits) {
2629:     PetscCalloc4(na,&obj,na,&resid,na,&cnorm,na,&lits);
2630:     tao->hist_malloc = PETSC_TRUE;
2631:   }

2633:   tao->hist_obj = obj;
2634:   tao->hist_resid = resid;
2635:   tao->hist_cnorm = cnorm;
2636:   tao->hist_lits = lits;
2637:   tao->hist_max   = na;
2638:   tao->hist_reset = reset;
2639:   tao->hist_len = 0;
2640:   return(0);
2641: }

2643: /*@C
2644:    TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history.

2646:    Collective on Tao

2648:    Input Parameter:
2649: .  tao - the Tao context

2651:    Output Parameters:
2652: +  obj   - array used to hold objective value history
2653: .  resid - array used to hold residual history
2654: .  cnorm - array used to hold constraint violation history
2655: .  lits  - integer array used to hold linear solver iteration count
2656: -  nhist  - size of obj, resid, cnorm, and lits

2658:    Notes:
2659:     This routine must be preceded by calls to TaoSetConvergenceHistory()
2660:     and TaoSolve(), otherwise it returns useless information.

2662:     The calling sequence for this routine in Fortran is
2663: $   call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr)

2665:    This routine is useful, e.g., when running a code for purposes
2666:    of accurate performance monitoring, when no I/O should be done
2667:    during the section of code that is being timed.

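   For example (an illustrative sketch), to print the stored objective values after a solve:
.vb
      PetscReal *obj;
      PetscInt  nhist,i;
      TaoGetConvergenceHistory(tao,&obj,NULL,NULL,NULL,&nhist);
      for (i=0;i<nhist;i++) {
        PetscPrintf(PETSC_COMM_WORLD,"%D: %g\n",i,(double)obj[i]);
      }
.ve
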
2669:    Level: advanced

2671: .seealso: TaoSetConvergenceHistory()

2673: @*/
2674: PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist)
2675: {
2678:   if (obj)   *obj   = tao->hist_obj;
2679:   if (cnorm) *cnorm = tao->hist_cnorm;
2680:   if (resid) *resid = tao->hist_resid;
  if (lits)  *lits  = tao->hist_lits;
2681:   if (nhist) *nhist = tao->hist_len;
2682:   return(0);
2683: }

2685: /*@
2686:    TaoSetApplicationContext - Sets the optional user-defined context for
2687:    a solver.

2689:    Logically Collective on Tao

2691:    Input Parameters:
2692: +  tao  - the Tao context
2693: -  usrP - optional user context

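   For example (an illustrative sketch; AppCtx is a hypothetical user-defined struct):
.vb
      AppCtx user;
      TaoSetApplicationContext(tao,&user);
      ...
      AppCtx *ctx;
      TaoGetApplicationContext(tao,&ctx);   /* ctx now points to user */
.ve
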
2695:    Level: intermediate

2697: .seealso: TaoGetApplicationContext()
2698: @*/
2699: PetscErrorCode  TaoSetApplicationContext(Tao tao,void *usrP)
2700: {
2703:   tao->user = usrP;
2704:   return(0);
2705: }

2707: /*@
2708:    TaoGetApplicationContext - Gets the user-defined context for a
2709:    TAO solver.

2711:    Not Collective

2713:    Input Parameter:
2714: .  tao  - Tao context

2716:    Output Parameter:
2717: .  usrP - user context

2719:    Level: intermediate

2721: .seealso: TaoSetApplicationContext()
2722: @*/
2723: PetscErrorCode  TaoGetApplicationContext(Tao tao,void *usrP)
2724: {
2727:   *(void**)usrP = tao->user;
2728:   return(0);
2729: }

2731: /*@
2732:    TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient.

2734:    Collective on tao

2736:    Input Parameters:
2737: +  tao  - the Tao context
2738: -  M    - the matrix defining the inner product; the gradient norm is computed as sqrt(g^T M g)

2740:    Level: beginner

2742: .seealso: TaoGetGradientNorm(), TaoGradientNorm()
2743: @*/
2744: PetscErrorCode  TaoSetGradientNorm(Tao tao, Mat M)
2745: {

2750:   PetscObjectReference((PetscObject)M);
2751:   MatDestroy(&tao->gradient_norm);
2752:   VecDestroy(&tao->gradient_norm_tmp);
2753:   tao->gradient_norm = M;
2754:   MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);
2755:   return(0);
2756: }

2758: /*@
2759:    TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient.

2761:    Not Collective

2763:    Input Parameter:
2764: .  tao  - Tao context

2766:    Output Parameter:
2767: .  M - the matrix defining the inner product

2769:    Level: beginner

2771: .seealso: TaoSetGradientNorm(), TaoGradientNorm()
2772: @*/
2773: PetscErrorCode  TaoGetGradientNorm(Tao tao, Mat *M)
2774: {
2777:   *M = tao->gradient_norm;
2778:   return(0);
2779: }

2781: /*@C
2782:    TaoGradientNorm - Compute the norm with respect to the inner product the user has set.

2784:    Collective on tao

2786:    Input Parameters:
2787: +  tao      - the Tao context
2788: .  gradient - the gradient vector whose norm is to be computed
2789: -  type     - the norm type

2791:    Output Parameter:
2792: .  gnorm    - the gradient norm

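   For example (illustrative; g is an existing gradient vector):
.vb
      PetscReal gnorm;
      TaoGradientNorm(tao,g,NORM_2,&gnorm);
.ve
   If a matrix M has been set with TaoSetGradientNorm(), this computes sqrt(g^T M g); otherwise it
   reduces to VecNorm().
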
2794:    Level: developer

2796: .seealso: TaoSetGradientNorm(), TaoGetGradientNorm()
2797: @*/
2798: PetscErrorCode  TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
2799: {

2807:   if (tao->gradient_norm) {
2808:     PetscScalar gnorms;

2810:     if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONG, "Norm type must be NORM_2 if an inner product for the gradient norm is set.");
2811:     MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);
2812:     VecDot(gradient, tao->gradient_norm_tmp, &gnorms);
2813:     *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
2814:   } else {
2815:     VecNorm(gradient, type, gnorm);
2816:   }
2817:   return(0);
2818: }

2820: /*@C
2821:    TaoMonitorDrawCtxCreate - Creates the monitor context used by the drawing monitors such as TaoDrawSolutionMonitor()

2823:    Collective on Tao

2825:    Output Parameter:
2826: .    ctx - the monitor context

2828:    Options Database:
2829: .   -tao_draw_solution_initial - show initial guess as well as current solution

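   For example (an illustrative sketch), to plot the solution in a 300x300 pixel window at every
   iteration:
.vb
      TaoMonitorDrawCtx ctx;
      TaoMonitorDrawCtxCreate(PETSC_COMM_WORLD,NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,1,&ctx);
      TaoSetMonitor(tao,TaoDrawSolutionMonitor,ctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
.ve
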
2831:    Level: intermediate

2833: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoMonitorDrawCtxDestroy()
2834: @*/
2835: PetscErrorCode  TaoMonitorDrawCtxCreate(MPI_Comm comm,const char host[],const char label[],int x,int y,int m,int n,PetscInt howoften,TaoMonitorDrawCtx *ctx)
2836: {
2837:   PetscErrorCode   ierr;

2840:   PetscNew(ctx);
2841:   PetscViewerDrawOpen(comm,host,label,x,y,m,n,&(*ctx)->viewer);
2842:   PetscViewerSetFromOptions((*ctx)->viewer);
2843:   (*ctx)->howoften = howoften;
2844:   return(0);
2845: }

2847: /*@C
2848:    TaoMonitorDrawCtxDestroy - Destroys the monitor context used by the drawing monitors such as TaoDrawSolutionMonitor()

2850:    Collective on Tao

2852:    Input Parameters:
2853: .    ctx - the monitor context

2855:    Level: intermediate

2857: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoDrawSolutionMonitor()
2858: @*/
2859: PetscErrorCode  TaoMonitorDrawCtxDestroy(TaoMonitorDrawCtx *ictx)
2860: {

2864:   PetscViewerDestroy(&(*ictx)->viewer);
2865:   PetscFree(*ictx);
2866:   return(0);
2867: }