Actual source code: taosolver.c

petsc-3.12.5 2020-03-29
  1: #define TAO_DLL

  3:  #include <petsc/private/taoimpl.h>

  5: PetscBool TaoRegisterAllCalled = PETSC_FALSE;
  6: PetscFunctionList TaoList = NULL;

  8: PetscClassId TAO_CLASSID;

 10: PetscLogEvent TAO_Solve;
 11: PetscLogEvent TAO_ObjectiveEval;
 12: PetscLogEvent TAO_GradientEval;
 13: PetscLogEvent TAO_ObjGradEval;
 14: PetscLogEvent TAO_HessianEval;
 15: PetscLogEvent TAO_JacobianEval;
 16: PetscLogEvent TAO_ConstraintsEval;

 18: const char *TaoSubSetTypes[] = {"subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",0};

 20: struct _n_TaoMonitorDrawCtx {
 21:   PetscViewer viewer;
 22:   PetscInt    howoften;  /* when > 0 uses iteration % howoften, when negative only final solution plotted */
 23: };

 25: /*@
 26:   TaoCreate - Creates a TAO solver

 28:   Collective

 30:   Input Parameter:
 31: . comm - MPI communicator

 33:   Output Parameter:
 34: . newtao - the new Tao context

 36:   Available methods include:
 37: +    nls - Newton's method with line search for unconstrained minimization
 38: .    ntr - Newton's method with trust region for unconstrained minimization
 39: .    ntl - Newton's method with trust region, line search for unconstrained minimization
 40: .    lmvm - Limited memory variable metric method for unconstrained minimization
 41: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
 42: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
 43: .    tron - Newton Trust Region method for bound constrained minimization
 44: .    gpcg - Gradient projection conjugate gradient method for quadratic bound constrained minimization
 45: .    blmvm - Limited memory variable metric method for bound constrained minimization
 46: .    lcl - Linearly constrained Lagrangian method for PDE-constrained minimization
 47: -    pounders - Model-based algorithm for nonlinear least squares

 49:    Options Database Keys:
 50: .   -tao_type - select which method TAO should use

 52:    Level: beginner

 54: .seealso: TaoSolve(), TaoDestroy()
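
  Example Usage:
  A minimal sketch of the typical life cycle; error checking is omitted, x is a
  previously created Vec holding the initial guess, and FormFunctionGradient and
  user are hypothetical user-defined names:
$    Tao tao;
$    TaoCreate(PETSC_COMM_WORLD,&tao);
$    TaoSetType(tao,TAOLMVM);
$    TaoSetInitialVector(tao,x);
$    TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);
$    TaoSetFromOptions(tao);
$    TaoSolve(tao);
$    TaoDestroy(&tao);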
 55: @*/
 56: PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
 57: {
 59:   Tao            tao;

 63:   *newtao = NULL;

 65:   TaoInitializePackage();
 66:   TaoLineSearchInitializePackage();

 68:   PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);
 69:   tao->ops->computeobjective=0;
 70:   tao->ops->computeobjectiveandgradient=0;
 71:   tao->ops->computegradient=0;
 72:   tao->ops->computehessian=0;
 73:   tao->ops->computeresidual=0;
 74:   tao->ops->computeresidualjacobian=0;
 75:   tao->ops->computeconstraints=0;
 76:   tao->ops->computejacobian=0;
 77:   tao->ops->computejacobianequality=0;
 78:   tao->ops->computejacobianinequality=0;
 79:   tao->ops->computeequalityconstraints=0;
 80:   tao->ops->computeinequalityconstraints=0;
 81:   tao->ops->convergencetest=TaoDefaultConvergenceTest;
 82:   tao->ops->convergencedestroy=0;
 83:   tao->ops->computedual=0;
 84:   tao->ops->setup=0;
 85:   tao->ops->solve=0;
 86:   tao->ops->view=0;
 87:   tao->ops->setfromoptions=0;
 88:   tao->ops->destroy=0;

 90:   tao->solution=NULL;
 91:   tao->gradient=NULL;
 92:   tao->ls_res = NULL;
 93:   tao->ls_jac = NULL;
 94:   tao->constraints=NULL;
 95:   tao->constraints_equality=NULL;
 96:   tao->constraints_inequality=NULL;
 97:   tao->res_weights_v=NULL;
 98:   tao->res_weights_w=NULL;
 99:   tao->stepdirection=NULL;
100:   tao->niter=0;
101:   tao->ntotalits=0;
102:   tao->XL = NULL;
103:   tao->XU = NULL;
104:   tao->IL = NULL;
105:   tao->IU = NULL;
106:   tao->DI = NULL;
107:   tao->DE = NULL;
108:   tao->gradient_norm = NULL;
109:   tao->gradient_norm_tmp = NULL;
110:   tao->hessian = NULL;
111:   tao->hessian_pre = NULL;
112:   tao->jacobian = NULL;
113:   tao->jacobian_pre = NULL;
114:   tao->jacobian_state = NULL;
115:   tao->jacobian_state_pre = NULL;
116:   tao->jacobian_state_inv = NULL;
117:   tao->jacobian_design = NULL;
118:   tao->jacobian_design_pre = NULL;
119:   tao->jacobian_equality = NULL;
120:   tao->jacobian_equality_pre = NULL;
121:   tao->jacobian_inequality = NULL;
122:   tao->jacobian_inequality_pre = NULL;
123:   tao->state_is = NULL;
124:   tao->design_is = NULL;

126:   tao->max_it     = 10000;
127:   tao->max_funcs   = 10000;
128: #if defined(PETSC_USE_REAL_SINGLE)
129:   tao->gatol       = 1e-5;
130:   tao->grtol       = 1e-5;
131: #else
132:   tao->gatol       = 1e-8;
133:   tao->grtol       = 1e-8;
134: #endif
135:   tao->crtol       = 0.0;
136:   tao->catol       = 0.0;
137:   tao->gttol       = 0.0;
138:   tao->steptol     = 0.0;
139:   tao->trust0      = PETSC_INFINITY;
140:   tao->fmin        = PETSC_NINFINITY;
141:   tao->hist_malloc = PETSC_FALSE;
142:   tao->hist_reset = PETSC_TRUE;
143:   tao->hist_max = 0;
144:   tao->hist_len = 0;
145:   tao->hist_obj = NULL;
146:   tao->hist_resid = NULL;
147:   tao->hist_cnorm = NULL;
148:   tao->hist_lits = NULL;

150:   tao->numbermonitors=0;
151:   tao->viewsolution=PETSC_FALSE;
152:   tao->viewhessian=PETSC_FALSE;
153:   tao->viewgradient=PETSC_FALSE;
154:   tao->viewjacobian=PETSC_FALSE;
155:   tao->viewconstraints = PETSC_FALSE;

157:   tao->bounded = PETSC_FALSE;

159:   tao->header_printed = PETSC_FALSE;

161:   /* These flags prevent algorithms from overriding user options */
162:   tao->max_it_changed   =PETSC_FALSE;
163:   tao->max_funcs_changed=PETSC_FALSE;
164:   tao->gatol_changed    =PETSC_FALSE;
165:   tao->grtol_changed    =PETSC_FALSE;
166:   tao->gttol_changed    =PETSC_FALSE;
167:   tao->steptol_changed  =PETSC_FALSE;
168:   tao->trust0_changed   =PETSC_FALSE;
169:   tao->fmin_changed     =PETSC_FALSE;
170:   tao->catol_changed    =PETSC_FALSE;
171:   tao->crtol_changed    =PETSC_FALSE;
172:   TaoResetStatistics(tao);
173:   *newtao = tao;
174:   return(0);
175: }

177: /*@
178:   TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u

180:   Collective on Tao

182:   Input Parameters:
183: . tao - the Tao context

185:   Notes:
186:   The user must set up the Tao with calls to TaoSetInitialVector(),
187:   TaoSetObjectiveRoutine(),
188:   TaoSetGradientRoutine(), and (if using a second-order method) TaoSetHessianRoutine().

190:   You should call TaoGetConvergedReason() or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or
191:   why it failed.

193:   Level: beginner

195: .seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoGetConvergedReason()
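
  Example Usage:
  A minimal sketch of checking the outcome after a solve; error checking is omitted:
$    TaoConvergedReason reason;
$    TaoSolve(tao);
$    TaoGetConvergedReason(tao,&reason);
$    if (reason <= 0) PetscPrintf(PETSC_COMM_WORLD,"TAO did not converge (reason %d)\n",(int)reason);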
196:  @*/
197: PetscErrorCode TaoSolve(Tao tao)
198: {
199:   PetscErrorCode   ierr;
200:   static PetscBool set = PETSC_FALSE;

204:   PetscCitationsRegister("@TechReport{tao-user-ref,\n"
205:                                 "title   = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
206:                                 "author  = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
207:                                 "Institution = {Argonne National Laboratory},\n"
208:                                 "Year   = 2014,\n"
209:                                 "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
210:                                 "url    = {https://www.mcs.anl.gov/research/projects/tao/}\n}\n",&set);
211:   tao->header_printed = PETSC_FALSE;
212:   TaoSetUp(tao);
213:   TaoResetStatistics(tao);
214:   if (tao->linesearch) {
215:     TaoLineSearchReset(tao->linesearch);
216:   }

218:   PetscLogEventBegin(TAO_Solve,tao,0,0,0);
219:   if (tao->ops->solve){ (*tao->ops->solve)(tao); }
220:   PetscLogEventEnd(TAO_Solve,tao,0,0,0);

222:   VecViewFromOptions(tao->solution,(PetscObject)tao,"-tao_view_solution");

224:   tao->ntotalits += tao->niter;
225:   TaoViewFromOptions(tao,NULL,"-tao_view");

227:   if (tao->printreason) {
228:     if (tao->reason > 0) {
229:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
230:     } else {
231:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iteration %D\n",TaoConvergedReasons[tao->reason],tao->niter);
232:     }
233:   }
234:   return(0);
235: }

237: /*@
238:   TaoSetUp - Sets up the internal data structures for the later use
239:   of a Tao solver

241:   Collective on tao

243:   Input Parameters:
244: . tao - the TAO context

246:   Notes:
247:   The user will not need to explicitly call TaoSetUp(), as it will
248:   automatically be called in TaoSolve().  However, if the user
249:   desires to call it explicitly, it should come after TaoCreate()
250:   and any TaoSetSomething() routines, but before TaoSolve().

252:   Level: advanced

254: .seealso: TaoCreate(), TaoSolve()
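
  Example Usage:
  A minimal sketch of calling TaoSetUp() explicitly; TaoSolve() would otherwise call it
  automatically. Error checking is omitted and FormFunctionGradient and user are
  hypothetical user-defined names:
$    TaoCreate(PETSC_COMM_WORLD,&tao);
$    TaoSetInitialVector(tao,x);
$    TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);
$    TaoSetUp(tao);
$    TaoSolve(tao);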
255: @*/
256: PetscErrorCode TaoSetUp(Tao tao)
257: {

262:   if (tao->setupcalled) return(0);

264:   if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector");
265:   if (tao->ops->setup) {
266:     (*tao->ops->setup)(tao);
267:   }
268:   tao->setupcalled = PETSC_TRUE;
269:   return(0);
270: }

272: /*@
273:   TaoDestroy - Destroys the TAO context that was created with
274:   TaoCreate()

276:   Collective on Tao

278:   Input Parameter:
279: . tao - the Tao context

281:   Level: beginner

283: .seealso: TaoCreate(), TaoSolve()
284: @*/
285: PetscErrorCode TaoDestroy(Tao *tao)
286: {

290:   if (!*tao) return(0);
292:   if (--((PetscObject)*tao)->refct > 0) {*tao=0;return(0);}

294:   if ((*tao)->ops->destroy) {
295:     (*((*tao))->ops->destroy)(*tao);
296:   }
297:   KSPDestroy(&(*tao)->ksp);
298:   TaoLineSearchDestroy(&(*tao)->linesearch);

300:   if ((*tao)->ops->convergencedestroy) {
301:     (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);
302:     if ((*tao)->jacobian_state_inv) {
303:       MatDestroy(&(*tao)->jacobian_state_inv);
304:     }
305:   }
306:   VecDestroy(&(*tao)->solution);
307:   VecDestroy(&(*tao)->gradient);
308:   VecDestroy(&(*tao)->ls_res);

310:   if ((*tao)->gradient_norm) {
311:     PetscObjectDereference((PetscObject)(*tao)->gradient_norm);
312:     VecDestroy(&(*tao)->gradient_norm_tmp);
313:   }

315:   VecDestroy(&(*tao)->XL);
316:   VecDestroy(&(*tao)->XU);
317:   VecDestroy(&(*tao)->IL);
318:   VecDestroy(&(*tao)->IU);
319:   VecDestroy(&(*tao)->DE);
320:   VecDestroy(&(*tao)->DI);
321:   VecDestroy(&(*tao)->constraints_equality);
322:   VecDestroy(&(*tao)->constraints_inequality);
323:   VecDestroy(&(*tao)->stepdirection);
324:   MatDestroy(&(*tao)->hessian_pre);
325:   MatDestroy(&(*tao)->hessian);
326:   MatDestroy(&(*tao)->ls_jac);
327:   MatDestroy(&(*tao)->ls_jac_pre);
328:   MatDestroy(&(*tao)->jacobian_pre);
329:   MatDestroy(&(*tao)->jacobian);
330:   MatDestroy(&(*tao)->jacobian_state_pre);
331:   MatDestroy(&(*tao)->jacobian_state);
332:   MatDestroy(&(*tao)->jacobian_state_inv);
333:   MatDestroy(&(*tao)->jacobian_design);
334:   MatDestroy(&(*tao)->jacobian_equality);
335:   MatDestroy(&(*tao)->jacobian_equality_pre);
336:   MatDestroy(&(*tao)->jacobian_inequality);
337:   MatDestroy(&(*tao)->jacobian_inequality_pre);
338:   ISDestroy(&(*tao)->state_is);
339:   ISDestroy(&(*tao)->design_is);
340:   VecDestroy(&(*tao)->res_weights_v);
341:   TaoCancelMonitors(*tao);
342:   if ((*tao)->hist_malloc) {
343:     PetscFree4((*tao)->hist_obj,(*tao)->hist_resid,(*tao)->hist_cnorm,(*tao)->hist_lits);
344:   }
345:   if ((*tao)->res_weights_n) {
346:     PetscFree((*tao)->res_weights_rows);
347:     PetscFree((*tao)->res_weights_cols);
348:     PetscFree((*tao)->res_weights_w);
349:   }
350:   PetscHeaderDestroy(tao);
351:   return(0);
352: }

354: /*@
355:   TaoSetFromOptions - Sets various Tao parameters from user
356:   options.

358:   Collective on Tao

360:   Input Parameter:
361: . tao - the Tao solver context

363:   Options Database Keys:
364: + -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.)
365: . -tao_gatol <gatol> - absolute error tolerance for ||gradient||
366: . -tao_grtol <grtol> - relative error tolerance for ||gradient||
367: . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
368: . -tao_max_it <max> - sets maximum number of iterations
369: . -tao_max_funcs <max> - sets maximum number of function evaluations
370: . -tao_fmin <fmin> - stop if function value reaches fmin
371: . -tao_steptol <tol> - stop if trust region radius less than <tol>
372: . -tao_trust0 <t> - initial trust region radius
373: . -tao_monitor - prints function value and residual at each iteration
374: .   -tao_smonitor - same as -tao_monitor, but truncates very small values
375: . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
376: . -tao_view_solution - prints solution vector at each iteration
377: .   -tao_view_residual - prints least-squares residual vector at each iteration
378: .   -tao_view_stepdirection - prints step direction vector at each iteration
379: . -tao_view_gradient - prints gradient vector at each iteration
380: . -tao_draw_solution - graphically view solution vector at each iteration
381: . -tao_draw_step - graphically view step vector at each iteration
382: . -tao_draw_gradient - graphically view gradient at each iteration
383: . -tao_fd_gradient - use gradient computed with finite differences
384: . -tao_fd_hessian - use hessian computed with finite differences
385: . -tao_mf_hessian - use matrix-free hessian computed with finite differences
386: . -tao_cancelmonitors - cancels all monitors (except those set with command line)
387: . -tao_view - prints information about the Tao after solving
388: - -tao_converged_reason - prints the reason TAO stopped iterating

390:   Notes:
391:   To see all options, run your program with the -help option or consult the
392:   user's manual. Should be called after TaoCreate() but before TaoSolve()

394:   Level: beginner
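
  Example Usage:
  A minimal sketch of the usual placement of TaoSetFromOptions(); error checking is
  omitted. The program can then be run with options such as
  -tao_type nls -tao_gatol 1e-10 -tao_monitor on the command line:
$    TaoCreate(PETSC_COMM_WORLD,&tao);
$    TaoSetInitialVector(tao,x);
$    TaoSetFromOptions(tao);
$    TaoSolve(tao);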
395: @*/
396: PetscErrorCode TaoSetFromOptions(Tao tao)
397: {
399:   TaoType        default_type = TAOLMVM;
400:   char           type[256], monfilename[PETSC_MAX_PATH_LEN];
401:   PetscViewer    monviewer;
402:   PetscBool      flg;
403:   MPI_Comm       comm;

407:   PetscObjectGetComm((PetscObject)tao,&comm);

409:   /* So no warnings are given about unused options */
410:   PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);

412:   PetscObjectOptionsBegin((PetscObject)tao);
413:   {
414:     TaoRegisterAll();
415:     if (((PetscObject)tao)->type_name) {
416:       default_type = ((PetscObject)tao)->type_name;
417:     }
418:     /* Check for type from options */
419:     PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);
420:     if (flg) {
421:       TaoSetType(tao,type);
422:     } else if (!((PetscObject)tao)->type_name) {
423:       TaoSetType(tao,default_type);
424:     }

426:     PetscOptionsReal("-tao_catol","Stop if constraint violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);
427:     if (flg) tao->catol_changed=PETSC_TRUE;
428:     PetscOptionsReal("-tao_crtol","Stop if relative constraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);
429:     if (flg) tao->crtol_changed=PETSC_TRUE;
430:     PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);
431:     if (flg) tao->gatol_changed=PETSC_TRUE;
432:     PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);
433:     if (flg) tao->grtol_changed=PETSC_TRUE;
434:     PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);
435:     if (flg) tao->gttol_changed=PETSC_TRUE;
436:     PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);
437:     if (flg) tao->max_it_changed=PETSC_TRUE;
438:     PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);
439:     if (flg) tao->max_funcs_changed=PETSC_TRUE;
440:     PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);
441:     if (flg) tao->fmin_changed=PETSC_TRUE;
442:     PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);
443:     if (flg) tao->steptol_changed=PETSC_TRUE;
444:     PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);
445:     if (flg) tao->trust0_changed=PETSC_TRUE;
446:     PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
447:     if (flg) {
448:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
449:       TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
450:     }

452:     PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);
453:     PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
454:     if (flg) {
455:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
456:       TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
457:     }

459:     PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
460:     if (flg) {
461:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
462:       TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
463:     }

465:     PetscOptionsString("-tao_view_residual","view least-squares residual vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
466:     if (flg) {
467:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
468:       TaoSetMonitor(tao,TaoResidualMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
469:     }

471:     PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
472:     if (flg) {
473:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
474:       TaoSetMonitor(tao,TaoMonitorDefault,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
475:     }

477:     PetscOptionsString("-tao_gmonitor","Use the convergence monitor with extra globalization info","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
478:     if (flg) {
479:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
480:       TaoSetMonitor(tao,TaoDefaultGMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
481:     }

483:     PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
484:     if (flg) {
485:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
486:       TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
487:     }

489:     PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
490:     if (flg) {
491:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
492:       TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
493:     }


496:     flg = PETSC_FALSE;
497:     PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);
498:     if (flg) {TaoCancelMonitors(tao);}

500:     flg = PETSC_FALSE;
501:     PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);
502:     if (flg) {
503:       TaoMonitorDrawCtx drawctx;
504:       PetscInt          howoften = 1;
505:       TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
506:       TaoSetMonitor(tao,TaoDrawSolutionMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
507:     }

509:     flg = PETSC_FALSE;
510:     PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);
511:     if (flg) {
512:       TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);
513:     }

515:     flg = PETSC_FALSE;
516:     PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);
517:     if (flg) {
518:       TaoMonitorDrawCtx drawctx;
519:       PetscInt          howoften = 1;
520:       TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
521:       TaoSetMonitor(tao,TaoDrawGradientMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
522:     }
523:     flg = PETSC_FALSE;
524:     PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);
525:     if (flg) {
526:       TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);
527:     }
528:     flg = PETSC_FALSE;
529:     PetscOptionsBool("-tao_fd_hessian","compute hessian using finite differences","TaoDefaultComputeHessian",flg,&flg,NULL);
530:     if (flg) {
531:       Mat H;

533:       MatCreate(PetscObjectComm((PetscObject)tao),&H);
534:       MatSetType(H,MATAIJ);
535:       TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessian,NULL);
536:       MatDestroy(&H);
537:     }
538:     flg = PETSC_FALSE;
539:     PetscOptionsBool("-tao_mf_hessian","compute matrix-free hessian using finite differences","TaoDefaultComputeHessianMFFD",flg,&flg,NULL);
540:     if (flg) {
541:       Mat H;

543:       MatCreate(PetscObjectComm((PetscObject)tao),&H);
544:       TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessianMFFD,NULL);
545:       MatDestroy(&H);
546:     }
547:     PetscOptionsEnum("-tao_subset_type","subset type","",TaoSubSetTypes,(PetscEnum)tao->subset_type,(PetscEnum*)&tao->subset_type,NULL);

549:     if (tao->ops->setfromoptions) {
550:       (*tao->ops->setfromoptions)(PetscOptionsObject,tao);
551:     }
552:   }
553:   PetscOptionsEnd();
554:   return(0);
555: }

557: /*@C
558:   TaoView - Prints information about the Tao

560:   Collective on Tao

562:   Input Parameters:
563: + tao - the Tao context
564: - viewer - visualization context

566:   Options Database Key:
567: . -tao_view - Calls TaoView() at the end of TaoSolve()

569:   Notes:
570:   The available visualization contexts include
571: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
572: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
573:          output where only the first processor opens
574:          the file.  All other processors send their
575:          data to the first processor to print.

577:   Level: beginner

579: .seealso: PetscViewerASCIIOpen()
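
  Example Usage:
  A minimal sketch of viewing the solver on standard output after a solve:
$    TaoSolve(tao);
$    TaoView(tao,PETSC_VIEWER_STDOUT_WORLD);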
580: @*/
581: PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
582: {
583:   PetscErrorCode      ierr;
584:   PetscBool           isascii,isstring;
585:   TaoType             type;

589:   if (!viewer) {
590:     PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);
591:   }

595:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
596:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
597:   if (isascii) {
598:     PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);

600:     if (tao->ops->view) {
601:       PetscViewerASCIIPushTab(viewer);
602:       (*tao->ops->view)(tao,viewer);
603:       PetscViewerASCIIPopTab(viewer);
604:     }
605:     if (tao->linesearch) {
606:       PetscViewerASCIIPushTab(viewer);
607:       TaoLineSearchView(tao->linesearch,viewer);
608:       PetscViewerASCIIPopTab(viewer);
609:     }
610:     if (tao->ksp) {
611:       PetscViewerASCIIPushTab(viewer);
612:       KSPView(tao->ksp,viewer);
613:       PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);
614:       PetscViewerASCIIPopTab(viewer);
615:     }

617:     PetscViewerASCIIPushTab(viewer);

619:     if (tao->XL || tao->XU) {
620:       PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);
621:     }

623:     PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);
624:     PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);
625:     PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);
626:     PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);

628:     if (tao->cnorm>0 || tao->catol>0 || tao->crtol>0){
629:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances:");
630:       ierr=PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);
631:       ierr=PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);
632:       PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);
633:     }

635:     if (tao->trust < tao->steptol){
636:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);
637:       ierr=PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);
638:     }

640:     if (tao->fmin>-1.e25){
641:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);
642:     }
643:     PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);

645:     PetscViewerASCIIPrintf(viewer,"total number of iterations=%D,          ",tao->niter);
646:     PetscViewerASCIIPrintf(viewer,"              (max: %D)\n",tao->max_it);

648:     if (tao->nfuncs>0){
649:       PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);
650:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
651:     }
652:     if (tao->ngrads>0){
653:       PetscViewerASCIIPrintf(viewer,"total number of gradient evaluations=%D,",tao->ngrads);
654:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
655:     }
656:     if (tao->nfuncgrads>0){
657:       PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);
658:       PetscViewerASCIIPrintf(viewer,"    (max: %D)\n",tao->max_funcs);
659:     }
660:     if (tao->nhess>0){
661:       PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);
662:     }
663:     /*  if (tao->linear_its>0){
664:      PetscViewerASCIIPrintf(viewer,"  total Krylov method iterations=%D\n",tao->linear_its);
665:      }*/
666:     if (tao->nconstraints>0){
667:       PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);
668:     }
669:     if (tao->njac>0){
670:       PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);
671:     }

673:     if (tao->reason>0){
674:       PetscViewerASCIIPrintf(viewer,    "Solution converged: ");
675:       switch (tao->reason) {
676:       case TAO_CONVERGED_GATOL:
677:         PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");
678:         break;
679:       case TAO_CONVERGED_GRTOL:
680:         PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");
681:         break;
682:       case TAO_CONVERGED_GTTOL:
683:         PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");
684:         break;
685:       case TAO_CONVERGED_STEPTOL:
686:         PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");
687:         break;
688:       case TAO_CONVERGED_MINF:
689:         PetscViewerASCIIPrintf(viewer," Minf --  f < fmin\n");
690:         break;
691:       case TAO_CONVERGED_USER:
692:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
693:         break;
694:       default:
695:         PetscViewerASCIIPrintf(viewer,"\n");
696:         break;
697:       }

699:     } else {
700:       PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);
701:       switch (tao->reason) {
702:       case TAO_DIVERGED_MAXITS:
703:         PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");
704:         break;
705:       case TAO_DIVERGED_NAN:
706:         PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");
707:         break;
708:       case TAO_DIVERGED_MAXFCN:
709:         PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");
710:         break;
711:       case TAO_DIVERGED_LS_FAILURE:
712:         PetscViewerASCIIPrintf(viewer," Line Search Failure\n");
713:         break;
714:       case TAO_DIVERGED_TR_REDUCTION:
715:         PetscViewerASCIIPrintf(viewer," Trust Region too small\n");
716:         break;
717:       case TAO_DIVERGED_USER:
718:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
719:         break;
720:       default:
721:         PetscViewerASCIIPrintf(viewer,"\n");
722:         break;
723:       }
724:     }
725:     PetscViewerASCIIPopTab(viewer);
726:   } else if (isstring) {
727:     TaoGetType(tao,&type);
728:     PetscViewerStringSPrintf(viewer," %-3.3s",type);
729:   }
730:   return(0);
731: }

733: /*@
734:   TaoSetTolerances - Sets parameters used in TAO convergence tests

736:   Logically collective on Tao

738:   Input Parameters:
739: + tao - the Tao context
740: . gatol - stop if norm of gradient is less than this
741: . grtol - stop if relative norm of gradient is less than this
742: - gttol - stop if norm of gradient is reduced by this factor

744:   Options Database Keys:
745: + -tao_gatol <gatol> - Sets gatol
746: . -tao_grtol <grtol> - Sets grtol
747: - -tao_gttol <gttol> - Sets gttol

749:   Stopping Criteria:
750: $ ||g(X)||                            <= gatol
751: $ ||g(X)|| / |f(X)|                   <= grtol
752: $ ||g(X)|| / ||g(X0)||                <= gttol

754:   Notes:
755:   Use PETSC_DEFAULT to leave one or more tolerances unchanged.

757:   Level: beginner

759: .seealso: TaoGetTolerances()
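
  Example Usage:
  A minimal sketch that tightens the absolute gradient tolerance while leaving the
  other two tolerances unchanged:
$    TaoSetTolerances(tao,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT);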

761: @*/
762: PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
763: {


769:   if (gatol != PETSC_DEFAULT) {
770:     if (gatol<0) {
771:       PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");
772:     } else {
773:       tao->gatol = PetscMax(0,gatol);
774:       tao->gatol_changed=PETSC_TRUE;
775:     }
776:   }

778:   if (grtol != PETSC_DEFAULT) {
779:     if (grtol<0) {
780:       PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");
781:     } else {
782:       tao->grtol = PetscMax(0,grtol);
783:       tao->grtol_changed=PETSC_TRUE;
784:     }
785:   }

787:   if (gttol != PETSC_DEFAULT) {
788:     if (gttol<0) {
789:       PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");
790:     } else {
791:       tao->gttol = PetscMax(0,gttol);
792:       tao->gttol_changed=PETSC_TRUE;
793:     }
794:   }
795:   return(0);
796: }

798: /*@
799:   TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO  convergence tests

801:   Logically collective on Tao

803:   Input Parameters:
804: + tao - the Tao context
805: . catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
806: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

808:   Options Database Keys:
809: + -tao_catol <catol> - Sets catol
810: - -tao_crtol <crtol> - Sets crtol

812:   Notes:
813:   Use PETSC_DEFAULT to leave any tolerance unchanged.

815:   Level: intermediate

817: .seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()

819: @*/
820: PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
821: {


827:   if (catol != PETSC_DEFAULT) {
828:     if (catol<0) {
829:       PetscInfo(tao,"Tried to set negative catol -- ignored.\n");
830:     } else {
831:       tao->catol = PetscMax(0,catol);
832:       tao->catol_changed=PETSC_TRUE;
833:     }
834:   }

836:   if (crtol != PETSC_DEFAULT) {
837:     if (crtol<0) {
838:       PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");
839:     } else {
840:       tao->crtol = PetscMax(0,crtol);
841:       tao->crtol_changed=PETSC_TRUE;
842:     }
843:   }
844:   return(0);
845: }

847: /*@
848:   TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO  convergence tests

850:   Not Collective

852:   Input Parameter:
853: . tao - the Tao context

855:   Output Parameters:
856: + catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
857: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

859:   Level: intermediate

861: .seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()

863: @*/
864: PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
865: {
868:   if (catol) *catol = tao->catol;
869:   if (crtol) *crtol = tao->crtol;
870:   return(0);
871: }

873: /*@
874:    TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
875:    When an approximate solution with an objective value below this number
876:    has been found, the solver will terminate.

878:    Logically Collective on Tao

880:    Input Parameters:
881: +  tao - the Tao solver context
882: -  fmin - the tolerance

884:    Options Database Keys:
885: .    -tao_fmin <fmin> - sets the minimum function value

887:    Level: intermediate

889: .seealso: TaoSetTolerances()
890: @*/
891: PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin)
892: {
895:   tao->fmin = fmin;
896:   tao->fmin_changed=PETSC_TRUE;
897:   return(0);
898: }

900: /*@
901:    TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
902:    When an approximate solution with an objective value below this number
903:    has been found, the solver will terminate.

905:    Not collective on Tao

907:    Input Parameters:
908: .  tao - the Tao solver context

910:    Output Parameters:
911: .  fmin - the minimum function value

913:    Level: intermediate

915: .seealso: TaoSetFunctionLowerBound()
916: @*/
917: PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin)
918: {
921:   *fmin = tao->fmin;
922:   return(0);
923: }

925: /*@
926:    TaoSetMaximumFunctionEvaluations - Sets a maximum number of
927:    function evaluations.

929:    Logically Collective on Tao

931:    Input Parameters:
932: +  tao - the Tao solver context
933: -  nfcn - the maximum number of function evaluations (>=0)

935:    Options Database Keys:
936: .    -tao_max_funcs <nfcn> - sets the maximum number of function evaluations

938:    Level: intermediate

940: .seealso: TaoSetTolerances(), TaoSetMaximumIterations()
941: @*/

943: PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn)
944: {
947:   tao->max_funcs = PetscMax(0,nfcn);
948:   tao->max_funcs_changed=PETSC_TRUE;
949:   return(0);
950: }

952: /*@
953:    TaoGetMaximumFunctionEvaluations - Gets the maximum number of
954:    function evaluations.

956:    Not Collective

958:    Input Parameters:
959: .  tao - the Tao solver context

961:    Output Parameters:
962: .  nfcn - the maximum number of function evaluations

964:    Level: intermediate

966: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
967: @*/

969: PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn)
970: {
973:   *nfcn = tao->max_funcs;
974:   return(0);
975: }

977: /*@
978:    TaoGetCurrentFunctionEvaluations - Gets the current number of
979:    function evaluations.

981:    Not Collective

983:    Input Parameters:
984: .  tao - the Tao solver context

986:    Output Parameters:
987: .  nfuncs - the current number of function evaluations

989:    Level: intermediate

991: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
992: @*/

994: PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs)
995: {
998:   *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
999:   return(0);
1000: }

1002: /*@
1003:    TaoSetMaximumIterations - Sets a maximum number of iterates.

1005:    Logically Collective on Tao

1007:    Input Parameters:
1008: +  tao - the Tao solver context
1009: -  maxits - the maximum number of iterates (>=0)

1011:    Options Database Keys:
1012: .    -tao_max_it <its> - sets the maximum number of iterations

1014:    Level: intermediate

1016: .seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations()
1017: @*/
1018: PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits)
1019: {
1022:   tao->max_it = PetscMax(0,maxits);
1023:   tao->max_it_changed=PETSC_TRUE;
1024:   return(0);
1025: }

1027: /*@
1028:    TaoGetMaximumIterations - Gets the maximum number of iterates.

1030:    Not Collective

1032:    Input Parameters:
1033: .  tao - the Tao solver context

1035:    Output Parameters:
1036: .  maxits - the maximum number of iterates

1038:    Level: intermediate

1040: .seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations()
1041: @*/
1042: PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits)
1043: {
1046:   *maxits = tao->max_it;
1047:   return(0);
1048: }

1050: /*@
1051:    TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.

1053:    Logically collective on Tao

1055:    Input Parameter:
1056: +  tao - a TAO optimization solver
1057: -  radius - the trust region radius

1059:    Level: intermediate

1061:    Options Database Key:
1062: .  -tao_trust0 <t0> - sets initial trust region radius

1064: .seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance()
1065: @*/
1066: PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
1067: {
1070:   tao->trust0 = PetscMax(0.0,radius);
1071:   tao->trust0_changed=PETSC_TRUE;
1072:   return(0);
1073: }

1075: /*@
1076:    TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.

1078:    Not Collective

1080:    Input Parameter:
1081: .  tao - a TAO optimization solver

1083:    Output Parameter:
1084: .  radius - the trust region radius

1086:    Level: intermediate

1088: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius()
1089: @*/
1090: PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius)
1091: {
1094:   *radius = tao->trust0;
1095:   return(0);
1096: }

1098: /*@
1099:    TaoGetCurrentTrustRegionRadius - Gets the current trust region radius.

1101:    Not Collective

1103:    Input Parameter:
1104: .  tao - a TAO optimization solver

1106:    Output Parameter:
1107: .  radius - the trust region radius

1109:    Level: intermediate

1111: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius()
1112: @*/
1113: PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius)
1114: {
1117:   *radius = tao->trust;
1118:   return(0);
1119: }

1121: /*@
1122:   TaoGetTolerances - gets the current values of tolerances

1124:   Not Collective

1126:   Input Parameters:
1127: . tao - the Tao context

1129:   Output Parameters:
1130: + gatol - stop if norm of gradient is less than this
1131: . grtol - stop if relative norm of gradient is less than this
1132: - gttol - stop if norm of gradient is reduced by this factor

1134:   Note: NULL can be used as an argument if not all tolerance values are needed

1136: .seealso: TaoSetTolerances()

1138:   Level: intermediate
1139: @*/
1140: PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol)
1141: {
1144:   if (gatol) *gatol=tao->gatol;
1145:   if (grtol) *grtol=tao->grtol;
1146:   if (gttol) *gttol=tao->gttol;
1147:   return(0);
1148: }

1150: /*@
1151:   TaoGetKSP - Gets the linear solver used by the optimization solver.
1152:   Application writers should use TaoGetKSP if they need direct access
1153:   to the PETSc KSP object.

1155:   Not Collective

1157:    Input Parameters:
1158: .  tao - the TAO solver

1160:    Output Parameters:
1161: .  ksp - the KSP linear solver used in the optimization solver

1163:    Level: intermediate
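
   Example Usage:
   A minimal sketch of adjusting the inner linear solver; the returned pointer may be
   NULL if the selected algorithm does not use a KSP:
$    KSP ksp;
$    TaoGetKSP(tao,&ksp);
$    if (ksp) {
$      KSPSetTolerances(ksp,1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
$    }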

1165: @*/
1166: PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp)
1167: {
1169:   *ksp = tao->ksp;
1170:   return(0);
1171: }

1173: /*@
1174:    TaoGetLinearSolveIterations - Gets the total number of linear iterations
1175:    used by the TAO solver

1177:    Not Collective

1179:    Input Parameter:
1180: .  tao - TAO context

1182:    Output Parameter:
1183: .  lits - number of linear iterations

1185:    Notes:
1186:    This counter is reset to zero for each successive call to TaoSolve()

1188:    Level: intermediate

1190: .seealso:  TaoGetKSP()
1191: @*/
1192: PetscErrorCode  TaoGetLinearSolveIterations(Tao tao,PetscInt *lits)
1193: {
1197:   *lits = tao->ksp_tot_its;
1198:   return(0);
1199: }

1201: /*@
1202:   TaoGetLineSearch - Gets the line search used by the optimization solver.
1203:   Application writers should use TaoGetLineSearch if they need direct access
1204:   to the TaoLineSearch object.

1206:   Not Collective

1208:    Input Parameters:
1209: .  tao - the TAO solver

1211:    Output Parameters:
1212: .  ls - the line search used in the optimization solver

1214:    Level: intermediate

1216: @*/
1217: PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls)
1218: {
1220:   *ls = tao->linesearch;
1221:   return(0);
1222: }

1224: /*@
1225:   TaoAddLineSearchCounts - Adds the number of function evaluations spent
1226:   in the line search to the running total.

1228:    Input Parameter:
1229: .  tao - the TAO solver context

1232:    Level: developer

1234: .seealso: TaoLineSearchApply()
1235: @*/
1236: PetscErrorCode TaoAddLineSearchCounts(Tao tao)
1237: {
1239:   PetscBool      flg;
1240:   PetscInt       nfeval,ngeval,nfgeval;

1244:   if (tao->linesearch) {
1245:     TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);
1246:     if (!flg) {
1247:       TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);
1248:       tao->nfuncs+=nfeval;
1249:       tao->ngrads+=ngeval;
1250:       tao->nfuncgrads+=nfgeval;
1251:     }
1252:   }
1253:   return(0);
1254: }

1256: /*@
1257:   TaoGetSolutionVector - Returns the vector with the current TAO solution

1259:   Not Collective

1261:   Input Parameter:
1262: . tao - the Tao context

1264:   Output Parameter:
1265: . X - the current solution

1267:   Level: intermediate

1269:   Note:  The returned vector will be the same object that was passed into TaoSetInitialVector()
1270: @*/
1271: PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X)
1272: {
1275:   *X = tao->solution;
1276:   return(0);
1277: }

1279: /*@
1280:   TaoGetGradientVector - Returns the vector with the current TAO gradient

1282:   Not Collective

1284:   Input Parameter:
1285: . tao - the Tao context

1287:   Output Parameter:
1288: . G - the current gradient

1290:   Level: intermediate
1291: @*/
1292: PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G)
1293: {
1296:   *G = tao->gradient;
1297:   return(0);
1298: }

1300: /*@
1301:    TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers.
1302:    These statistics include the iteration number, residual norms, and convergence status.
1303:    This routine gets called before solving each optimization problem.

1305:    Collective on Tao

1307:    Input Parameters:
1308: .  tao - the Tao context

1310:    Level: developer

1312: .seealso: TaoCreate(), TaoSolve()
1313: @*/
1314: PetscErrorCode TaoResetStatistics(Tao tao)
1315: {
1318:   tao->niter        = 0;
1319:   tao->nfuncs       = 0;
1320:   tao->nfuncgrads   = 0;
1321:   tao->ngrads       = 0;
1322:   tao->nhess        = 0;
1323:   tao->njac         = 0;
1324:   tao->nconstraints = 0;
1325:   tao->ksp_its      = 0;
1326:   tao->ksp_tot_its  = 0;
1327:   tao->reason       = TAO_CONTINUE_ITERATING;
1328:   tao->residual     = 0.0;
1329:   tao->cnorm        = 0.0;
1330:   tao->step         = 0.0;
1331:   tao->lsflag       = PETSC_FALSE;
1332:   if (tao->hist_reset) tao->hist_len=0;
1333:   return(0);
1334: }

1336: /*@C
1337:   TaoSetUpdate - Sets the general-purpose update function called
1338:   at the beginning of every iteration of the nonlinear solve. Specifically
1339:   it is called at the top of every iteration, after the new solution and the gradient
1340:   are determined, but before the Hessian is computed (if applicable).

1342:   Logically Collective on Tao

1344:   Input Parameters:
1345: + tao - The Tao solver context
1346: . func - The update function
- ctx - [optional] user-defined context for the update routine (may be NULL)

1348:   Calling sequence of func:
1349: $ func(Tao tao, PetscInt step, void *ctx);

1351: + step - The current step of the iteration
- ctx - The user-defined context

1353:   Level: advanced

1355: .seealso: TaoSolve()
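
  Example Usage:
  A minimal sketch of registering an update callback; MyUpdate and user are hypothetical
  user-defined names and error checking is omitted:
$    PetscErrorCode MyUpdate(Tao tao,PetscInt step,void *ctx)
$    {
$      // e.g. refresh application data that depends on the current iterate
$      return 0;
$    }
$    ...
$    TaoSetUpdate(tao,MyUpdate,&user);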
1356: @*/
1357: PetscErrorCode  TaoSetUpdate(Tao tao, PetscErrorCode (*func)(Tao, PetscInt,void*), void *ctx)
1358: {
1361:   tao->ops->update = func;
1362:   tao->user_update = ctx;
1363:   return(0);
1364: }

1366: /*@C
1367:   TaoSetConvergenceTest - Sets the function that is to be used to test
1368:   for convergence of the iterative minimization solution.  The new convergence
1369:   testing routine will replace TAO's default convergence test.

1371:   Logically Collective on Tao

1373:   Input Parameters:
1374: + tao - the Tao object
1375: . conv - the routine to test for convergence
1376: - ctx - [optional] context for private data for the convergence routine
1377:         (may be NULL)

1379:   Calling sequence of conv:
1380: $   PetscErrorCode conv(Tao tao, void *ctx)

1382: + tao - the Tao object
1383: - ctx - [optional] convergence context

1385:   Note: The new convergence testing routine should call TaoSetConvergedReason().

1387:   Level: advanced

1389: .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor()
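
  Example Usage:
  A minimal sketch of a user-defined convergence test; MyConvergenceTest is a
  hypothetical name and the 1.e-7 threshold is arbitrary:
$    PetscErrorCode MyConvergenceTest(Tao tao,void *ctx)
$    {
$      PetscInt           its;
$      PetscReal          f,gnorm,cnorm,xdiff;
$      TaoConvergedReason reason;
$      TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
$      if (gnorm < 1.e-7) TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
$      return 0;
$    }
$    ...
$    TaoSetConvergenceTest(tao,MyConvergenceTest,NULL);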

1391: @*/
1392: PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx)
1393: {
1396:   (tao)->ops->convergencetest = conv;
1397:   (tao)->cnvP = ctx;
1398:   return(0);
1399: }

1401: /*@C
1402:    TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every
1403:    iteration of the solver to display the iteration's
1404:    progress.

1406:    Logically Collective on Tao

1408:    Input Parameters:
1409: +  tao - the Tao solver context
1410: .  mymonitor - monitoring routine
1411: -  mctx - [optional] user-defined context for private data for the
1412:           monitor routine (may be NULL)

1414:    Calling sequence of mymonitor:
1415: $     PetscErrorCode mymonitor(Tao tao,void *mctx)

1417: +    tao - the Tao solver context
1418: -    mctx - [optional] monitoring context


1421:    Options Database Keys:
1422: +    -tao_monitor        - sets TaoMonitorDefault()
1423: .    -tao_smonitor       - sets short monitor
1424: .    -tao_cmonitor       - same as smonitor plus constraint norm
1425: .    -tao_view_solution   - view solution at each iteration
1426: .    -tao_view_gradient   - view gradient at each iteration
1427: .    -tao_view_residual - view least-squares residual vector at each iteration
1428: -    -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database.


1431:    Notes:
1432:    Several different monitoring routines may be set by calling
1433:    TaoSetMonitor() multiple times; all will be called in the
1434:    order in which they were set.

1436:    Fortran Notes:
1437:     Only one monitor function may be set

1439:    Level: intermediate

1441: .seealso: TaoMonitorDefault(), TaoCancelMonitors(),  TaoSetDestroyRoutine()
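
   Example Usage:
   A minimal sketch of a user-defined monitor; MyMonitor is a hypothetical name and no
   destroy routine is needed for the NULL context:
$    PetscErrorCode MyMonitor(Tao tao,void *ctx)
$    {
$      PetscInt           its;
$      PetscReal          f,gnorm,cnorm,xdiff;
$      TaoConvergedReason reason;
$      TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
$      PetscPrintf(PetscObjectComm((PetscObject)tao),"iter %D: f=%g  ||g||=%g\n",its,(double)f,(double)gnorm);
$      return 0;
$    }
$    ...
$    TaoSetMonitor(tao,MyMonitor,NULL,NULL);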
1442: @*/
1443: PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**))
1444: {
1446:   PetscInt       i;
1447:   PetscBool      identical;

1451:   if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PETSC_COMM_SELF,1,"Cannot attach another monitor -- max=%d",MAXTAOMONITORS);

1453:   for (i=0; i<tao->numbermonitors;i++) {
1454:     PetscMonitorCompare((PetscErrorCode (*)(void))func,ctx,dest,(PetscErrorCode (*)(void))tao->monitor[i],tao->monitorcontext[i],tao->monitordestroy[i],&identical);
1455:     if (identical) return(0);
1456:   }
1457:   tao->monitor[tao->numbermonitors] = func;
1458:   tao->monitorcontext[tao->numbermonitors] = (void*)ctx;
1459:   tao->monitordestroy[tao->numbermonitors] = dest;
1460:   ++tao->numbermonitors;
1461:   return(0);
1462: }

1464: /*@
1465:    TaoCancelMonitors - Clears all the monitor functions for a Tao object.

1467:    Logically Collective on Tao

1469:    Input Parameters:
1470: .  tao - the Tao solver context

1472:    Options Database:
1473: .  -tao_cancelmonitors - cancels all monitors that have been hardwired
1474:     into a code by calls to TaoSetMonitor(), but does not cancel those
1475:     set via the options database

1477:    Notes:
1478:    There is no way to clear one specific monitor from a Tao object.

1480:    Level: advanced

1482: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1483: @*/
1484: PetscErrorCode TaoCancelMonitors(Tao tao)
1485: {
1486:   PetscInt       i;

1491:   for (i=0;i<tao->numbermonitors;i++) {
1492:     if (tao->monitordestroy[i]) {
1493:       (*tao->monitordestroy[i])(&tao->monitorcontext[i]);
1494:     }
1495:   }
1496:   tao->numbermonitors=0;
1497:   return(0);
1498: }

1500: /*@
1501:    TaoMonitorDefault - Default routine for monitoring progress of the
1502:    Tao solvers (default).  This monitor prints the function value and gradient
1503:    norm at each iteration.  It can be turned on from the command line using the
1504:    -tao_monitor option

1506:    Collective on Tao

1508:    Input Parameters:
1509: +  tao - the Tao context
1510: -  ctx - PetscViewer context or NULL

1512:    Options Database Keys:
1513: .  -tao_monitor

1515:    Level: advanced

1517: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1518: @*/
1519: PetscErrorCode TaoMonitorDefault(Tao tao, void *ctx)
1520: {
1522:   PetscInt       its, tabs;
1523:   PetscReal      fct,gnorm;
1524:   PetscViewer    viewer = (PetscViewer)ctx;

1528:   its=tao->niter;
1529:   fct=tao->fc;
1530:   gnorm=tao->residual;
1531:   PetscViewerASCIIGetTab(viewer, &tabs);
1532:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1533:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1534:      PetscViewerASCIIPrintf(viewer,"  Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1535:      tao->header_printed = PETSC_TRUE;
1536:    }
1537:   ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1538:   ierr=PetscViewerASCIIPrintf(viewer,"  Function value: %g,",(double)fct);
1539:   if (gnorm >= PETSC_INFINITY) {
1540:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf \n");
1541:   } else {
1542:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g \n",(double)gnorm);
1543:   }
1544:   PetscViewerASCIISetTab(viewer, tabs);
1545:   return(0);
1546: }

1548: /*@
1549:    TaoDefaultGMonitor - Default routine for monitoring progress of the
1550:    Tao solvers (default) with extra detail on the globalization method.
1551:    This monitor prints the function value and gradient norm at each
1552:    iteration, as well as the step size and trust radius. Note that the
1553:    step size and trust radius may be the same for some algorithms.
1554:    It can be turned on from the command line using the
1555:    -tao_gmonitor option

1557:    Collective on Tao

1559:    Input Parameters:
1560: +  tao - the Tao context
1561: -  ctx - PetscViewer context or NULL

1563:    Options Database Keys:
1564: .  -tao_gmonitor

1566:    Level: advanced

1568: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1569: @*/
1570: PetscErrorCode TaoDefaultGMonitor(Tao tao, void *ctx)
1571: {
1573:   PetscInt       its, tabs;
1574:   PetscReal      fct,gnorm,stp,tr;
1575:   PetscViewer    viewer = (PetscViewer)ctx;

1579:   its=tao->niter;
1580:   fct=tao->fc;
1581:   gnorm=tao->residual;
1582:   stp=tao->step;
1583:   tr=tao->trust;
1584:   PetscViewerASCIIGetTab(viewer, &tabs);
1585:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1586:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1587:      PetscViewerASCIIPrintf(viewer,"  Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1588:      tao->header_printed = PETSC_TRUE;
1589:    }
1590:   ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1591:   ierr=PetscViewerASCIIPrintf(viewer,"  Function value: %g,",(double)fct);
1592:   if (gnorm >= PETSC_INFINITY) {
1593:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf,");
1594:   } else {
1595:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g,",(double)gnorm);
1596:   }
1597:   PetscViewerASCIIPrintf(viewer,"  Step: %g,  Trust: %g\n",(double)stp,(double)tr);
1598:   PetscViewerASCIISetTab(viewer, tabs);
1599:   return(0);
1600: }

1602: /*@
1603:    TaoDefaultSMonitor - Default routine for monitoring progress of the
1604:    solver. Same as TaoMonitorDefault() except
1605:    it prints fewer digits of the residual as the residual gets smaller.
1606:    This is because the later digits are meaningless and are often
1607:    different on different machines; by using this routine different
1608:    machines will usually generate the same output. It can be turned on
1609:    by using the -tao_smonitor option

1611:    Collective on Tao

1613:    Input Parameters:
1614: +  tao - the Tao context
1615: -  ctx - PetscViewer context of type ASCII

1617:    Options Database Keys:
1618: .  -tao_smonitor

1620:    Level: advanced

1622: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1623: @*/
1624: PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
1625: {
1627:   PetscInt       its, tabs;
1628:   PetscReal      fct,gnorm;
1629:   PetscViewer    viewer = (PetscViewer)ctx;

1633:   its=tao->niter;
1634:   fct=tao->fc;
1635:   gnorm=tao->residual;
1636:   PetscViewerASCIIGetTab(viewer, &tabs);
1637:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1638:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);
1639:   ierr=PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);
1640:   if (gnorm >= PETSC_INFINITY) {
1641:     ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");
1642:   } else if (gnorm > 1.e-6) {
1643:     ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);
1644:   } else if (gnorm > 1.e-11) {
1645:     ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");
1646:   } else {
1647:     ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");
1648:   }
1649:   PetscViewerASCIISetTab(viewer, tabs);
1650:   return(0);
1651: }

1653: /*@
1654:    TaoDefaultCMonitor - same as TaoMonitorDefault() except
1655:    it prints the norm of the constraints function. It can be turned on
1656:    from the command line using the -tao_cmonitor option

1658:    Collective on Tao

1660:    Input Parameters:
1661: +  tao - the Tao context
1662: -  ctx - PetscViewer context or NULL

1664:    Options Database Keys:
1665: .  -tao_cmonitor

1667:    Level: advanced

1669: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1670: @*/
1671: PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx)
1672: {
1674:   PetscInt       its, tabs;
1675:   PetscReal      fct,gnorm;
1676:   PetscViewer    viewer = (PetscViewer)ctx;

1680:   its=tao->niter;
1681:   fct=tao->fc;
1682:   gnorm=tao->residual;
1683:   PetscViewerASCIIGetTab(viewer, &tabs);
1684:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1685:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %D,",its);
1686:   ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1687:   ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g ",(double)gnorm);
1688:   PetscViewerASCIIPrintf(viewer,"  Constraint: %g \n",(double)tao->cnorm);
1689:   PetscViewerASCIISetTab(viewer, tabs);
1690:   return(0);
1691: }

1693: /*@C
1694:    TaoSolutionMonitor - Views the solution at each iteration
1695:    It can be turned on from the command line using the
1696:    -tao_view_solution option

1698:    Collective on Tao

1700:    Input Parameters:
1701: +  tao - the Tao context
1702: -  ctx - PetscViewer context or NULL

1704:    Options Database Keys:
1705: .  -tao_view_solution

1707:    Level: advanced

1709: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1710: @*/
1711: PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx)
1712: {
1714:   PetscViewer    viewer  = (PetscViewer)ctx;

1718:   VecView(tao->solution, viewer);
1719:   return(0);
1720: }

1722: /*@C
1723:    TaoGradientMonitor - Views the gradient at each iteration.
1724:    It can be turned on from the command line using the
1725:    -tao_view_gradient option

1727:    Collective on Tao

1729:    Input Parameters:
1730: +  tao - the Tao context
1731: -  ctx - PetscViewer context or NULL

1733:    Options Database Keys:
1734: .  -tao_view_gradient

1736:    Level: advanced

1738: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1739: @*/
1740: PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx)
1741: {
1743:   PetscViewer    viewer = (PetscViewer)ctx;

1747:   VecView(tao->gradient, viewer);
1748:   return(0);
1749: }

1751: /*@C
1752:    TaoStepDirectionMonitor - Views the step direction at each iteration.
1753:    It can be turned on from the command line using the
1754:    -tao_view_stepdirection option

1756:    Collective on Tao

1758:    Input Parameters:
1759: +  tao - the Tao context
1760: -  ctx - PetscViewer context or NULL

1762:    Options Database Keys:
1763: .  -tao_view_stepdirection

1765:    Level: advanced

1767: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1768: @*/
1769: PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx)
1770: {
1772:   PetscViewer    viewer = (PetscViewer)ctx;

1776:   VecView(tao->stepdirection, viewer);
1777:   return(0);
1778: }

1780: /*@C
1781:    TaoDrawSolutionMonitor - Plots the solution at each iteration.
1782:    It can be turned on from the command line using the
1783:    -tao_draw_solution option

1785:    Collective on Tao

1787:    Input Parameters:
1788: +  tao - the Tao context
1789: -  ctx - TaoMonitorDrawCtx context created with TaoMonitorDrawCtxCreate()

1791:    Options Database Keys:
1792: .  -tao_draw_solution

1794:    Level: advanced

1796: .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor
1797: @*/
1798: PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx)
1799: {
1800:   PetscErrorCode    ierr;
1801:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1804:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1805:   VecView(tao->solution,ictx->viewer);
1806:   return(0);
1807: }

1809: /*@C
1810:    TaoDrawGradientMonitor - Plots the gradient at each iteration.
1811:    It can be turned on from the command line using the
1812:    -tao_draw_gradient option

1814:    Collective on Tao

1816:    Input Parameters:
1817: +  tao - the Tao context
1818: -  ctx - TaoMonitorDrawCtx context created with TaoMonitorDrawCtxCreate()

1820:    Options Database Keys:
1821: .  -tao_draw_gradient

1823:    Level: advanced

1825: .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor
1826: @*/
1827: PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx)
1828: {
1829:   PetscErrorCode    ierr;
1830:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1833:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1834:   VecView(tao->gradient,ictx->viewer);
1835:   return(0);
1836: }

1838: /*@C
1839:    TaoDrawStepMonitor - Plots the step direction at each iteration.
1840:    It can be turned on from the command line using the
1841:    -tao_draw_step option

1843:    Collective on Tao

1845:    Input Parameters:
1846: +  tao - the Tao context
1847: -  ctx - PetscViewer context

1849:    Options Database Keys:
1850: .  -tao_draw_step

1852:    Level: advanced

1854: .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor
1855: @*/
1856: PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx)
1857: {
1859:   PetscViewer    viewer = (PetscViewer)(ctx);

1862:   VecView(tao->stepdirection, viewer);
1863:   return(0);
1864: }

1866: /*@C
1867:    TaoResidualMonitor - Views the least-squares residual at each iteration.
1868:    It can be turned on from the command line using the
1869:    -tao_view_ls_residual option

1871:    Collective on Tao

1873:    Input Parameters:
1874: +  tao - the Tao context
1875: -  ctx - PetscViewer context or NULL

1877:    Options Database Keys:
1878: .  -tao_view_ls_residual

1880:    Level: advanced

1882: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1883: @*/
1884: PetscErrorCode TaoResidualMonitor(Tao tao, void *ctx)
1885: {
1887:   PetscViewer    viewer  = (PetscViewer)ctx;

1891:   VecView(tao->ls_res,viewer);
1892:   return(0);
1893: }

1895: /*@
1896:    TaoDefaultConvergenceTest - Determines whether the solver should continue iterating
1897:    or terminate.

1899:    Collective on Tao

1901:    Input Parameters:
1902: +  tao - the Tao context
1903: -  dummy - unused dummy context

1905:    Output Parameter:
1906: .  reason - the reason for terminating, stored in the Tao object (see TaoGetConvergedReason())

1908:    Notes:
1909:    This routine checks the residual in the optimality conditions, the
1910:    relative residual in the optimality conditions, the number of function
1911:    evaluations, and the function value to test convergence.  Some
1912:    solvers may use different convergence routines.

1914:    Level: developer

1916: .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason()
1917: @*/

1919: PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy)
1920: {
1921:   PetscInt           niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
1922:   PetscInt           max_funcs=tao->max_funcs;
1923:   PetscReal          gnorm=tao->residual, gnorm0=tao->gnorm0;
1924:   PetscReal          f=tao->fc, steptol=tao->steptol,trradius=tao->step;
1925:   PetscReal          gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol;
1926:   PetscReal          catol=tao->catol,crtol=tao->crtol;
1927:   PetscReal          fmin=tao->fmin, cnorm=tao->cnorm;
1928:   TaoConvergedReason reason=tao->reason;
1929:   PetscErrorCode     ierr;

1933:   if (reason != TAO_CONTINUE_ITERATING) {
1934:     return(0);
1935:   }

1937:   if (PetscIsInfOrNanReal(f)) {
1938:     PetscInfo(tao,"Failed to converged, function value is Inf or NaN\n");
1939:     reason = TAO_DIVERGED_NAN;
1940:   } else if (f <= fmin && cnorm <=catol) {
1941:     PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);
1942:     reason = TAO_CONVERGED_MINF;
1943:   } else if (gnorm<= gatol && cnorm <=catol) {
1944:     PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);
1945:     reason = TAO_CONVERGED_GATOL;
1946:   } else if ( f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) {
1947:     PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);
1948:     reason = TAO_CONVERGED_GRTOL;
1949:   } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) {
1950:     PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);
1951:     reason = TAO_CONVERGED_GTTOL;
1952:   } else if (nfuncs > max_funcs){
1953:     PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);
1954:     reason = TAO_DIVERGED_MAXFCN;
1955:   } else if ( tao->lsflag != 0 ){
1956:     PetscInfo(tao,"Tao Line Search failure.\n");
1957:     reason = TAO_DIVERGED_LS_FAILURE;
1958:   } else if (trradius < steptol && niter > 0){
1959:     PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);
1960:     reason = TAO_CONVERGED_STEPTOL;
1961:   } else if (niter >= tao->max_it) {
1962:     PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);
1963:     reason = TAO_DIVERGED_MAXITS;
1964:   } else {
1965:     reason = TAO_CONTINUE_ITERATING;
1966:   }
1967:   tao->reason = reason;
1968:   return(0);
1969: }
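
/* Illustrative sketch (not part of the original taosolver.c): a user-defined convergence
   test that stops once the objective drops below an application-chosen target and
   otherwise defers to TaoDefaultConvergenceTest().  MyConvergenceTest and `target' are
   hypothetical names; error checking is omitted.
.vb
   static PetscErrorCode MyConvergenceTest(Tao tao,void *ctx)
   {
     PetscReal f,target = *(PetscReal*)ctx;

     TaoGetSolutionStatus(tao,NULL,&f,NULL,NULL,NULL,NULL);  // current objective value
     if (f < target) {
       TaoSetConvergedReason(tao,TAO_CONVERGED_USER);        // stop for a user-defined reason
     } else {
       TaoDefaultConvergenceTest(tao,NULL);                  // otherwise apply the standard tests
     }
     return 0;
   }

   // in the application, before TaoSolve():
   PetscReal target = 1.0e-3;                                // must outlive the solve
   TaoSetConvergenceTest(tao,MyConvergenceTest,&target);
.ve
*/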

1971: /*@C
1972:    TaoSetOptionsPrefix - Sets the prefix used for searching for all
1973:    TAO options in the database.


1976:    Logically Collective on Tao

1978:    Input Parameters:
1979: +  tao - the Tao context
1980: -  prefix - the prefix string to prepend to all TAO option requests

1982:    Notes:
1983:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1984:    The first character of all runtime options is AUTOMATICALLY the hyphen.

1986:    For example, to distinguish between the runtime options for two
1987:    different TAO solvers, one could call
1988: .vb
1989:       TaoSetOptionsPrefix(tao1,"sys1_")
1990:       TaoSetOptionsPrefix(tao2,"sys2_")
1991: .ve

1993:    This would enable use of different options for each system, such as
1994: .vb
1995:       -sys1_tao_type blmvm -sys1_tao_gatol 1.e-3
1996:       -sys2_tao_type lmvm  -sys2_tao_gatol 1.e-4
1997: .ve


2000:    Level: advanced

2002: .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix()
2003: @*/

2005: PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[])
2006: {

2010:   PetscObjectSetOptionsPrefix((PetscObject)tao,p);
2011:   if (tao->linesearch) {
2012:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
2013:   }
2014:   if (tao->ksp) {
2015:     KSPSetOptionsPrefix(tao->ksp,p);
2016:   }
2017:   return(0);
2018: }
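
/* Illustrative sketch (not part of the original taosolver.c): giving two independent Tao
   solvers their own option prefixes so each can be configured separately at runtime
   (e.g. -sys1_tao_type blmvm -sys2_tao_gatol 1e-8).  Error checking is omitted.
.vb
   Tao tao1,tao2;
   TaoCreate(PETSC_COMM_WORLD,&tao1);
   TaoCreate(PETSC_COMM_WORLD,&tao2);
   TaoSetOptionsPrefix(tao1,"sys1_");
   TaoSetOptionsPrefix(tao2,"sys2_");
   TaoSetFromOptions(tao1);   // reads the -sys1_tao_* options
   TaoSetFromOptions(tao2);   // reads the -sys2_tao_* options
.ve
*/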

2020: /*@C
2021:    TaoAppendOptionsPrefix - Appends to the prefix used for searching for all
2022:    TAO options in the database.


2025:    Logically Collective on Tao

2027:    Input Parameters:
2028: +  tao - the Tao solver context
2029: -  prefix - the prefix string to prepend to all TAO option requests

2031:    Notes:
2032:    A hyphen (-) must NOT be given at the beginning of the prefix name.
2033:    The first character of all runtime options is AUTOMATICALLY the hyphen.


2036:    Level: advanced

2038: .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix()
2039: @*/
2040: PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[])
2041: {

2045:   PetscObjectAppendOptionsPrefix((PetscObject)tao,p);
2046:   if (tao->linesearch) {
2047:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
2048:   }
2049:   if (tao->ksp) {
2050:     KSPSetOptionsPrefix(tao->ksp,p);
2051:   }
2052:   return(0);
2053: }

2055: /*@C
2056:   TaoGetOptionsPrefix - Gets the prefix used for searching for all
2057:   TAO options in the database

2059:   Not Collective

2061:   Input Parameters:
2062: . tao - the Tao context

2064:   Output Parameters:
2065: . prefix - pointer to the prefix string used

2067:   Notes:
2068:     On the Fortran side, the user should pass in a string 'prefix' of
2069:   sufficient length to hold the prefix.

2071:   Level: advanced

2073: .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix()
2074: @*/
2075: PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[])
2076: {
2077:    return PetscObjectGetOptionsPrefix((PetscObject)tao,p);
2078: }

2080: /*@C
2081:    TaoSetType - Sets the method for the unconstrained minimization solver.

2083:    Collective on Tao

2085:    Input Parameters:
2086: +  solver - the Tao solver context
2087: -  type - a known method

2089:    Options Database Key:
2090: .  -tao_type <type> - Sets the method; use -help for a list
2091:    of available methods (for instance, "-tao_type lmvm" or "-tao_type tron")

2093:    Available methods include:
2094: +    nls - Newton's method with line search for unconstrained minimization
2095: .    ntr - Newton's method with trust region for unconstrained minimization
2096: .    ntl - Newton's method with trust region, line search for unconstrained minimization
2097: .    lmvm - Limited memory variable metric method for unconstrained minimization
2098: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
2099: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
2100: .    tron - Newton Trust Region method for bound constrained minimization
2101: .    gpcg - Newton Trust Region method for quadratic bound constrained minimization
2102: .    blmvm - Limited memory variable metric method for bound constrained minimization
2103: -    pounders - Model-based algorithm for nonlinear least squares

2105:   Level: intermediate

2107: .seealso: TaoCreate(), TaoGetType(), TaoType

2109: @*/
2110: PetscErrorCode TaoSetType(Tao tao, TaoType type)
2111: {
2113:   PetscErrorCode (*create_xxx)(Tao);
2114:   PetscBool      issame;


2119:   PetscObjectTypeCompare((PetscObject)tao,type,&issame);
2120:   if (issame) return(0);

2122:   PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);
2123:   if (!create_xxx) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type);

2125:   /* Destroy the existing solver information */
2126:   if (tao->ops->destroy) {
2127:     (*tao->ops->destroy)(tao);
2128:   }
2129:   KSPDestroy(&tao->ksp);
2130:   TaoLineSearchDestroy(&tao->linesearch);
2131:   VecDestroy(&tao->gradient);
2132:   VecDestroy(&tao->stepdirection);

2134:   tao->ops->setup = 0;
2135:   tao->ops->solve = 0;
2136:   tao->ops->view  = 0;
2137:   tao->ops->setfromoptions = 0;
2138:   tao->ops->destroy = 0;

2140:   tao->setupcalled = PETSC_FALSE;

2142:   (*create_xxx)(tao);
2143:   PetscObjectChangeTypeName((PetscObject)tao,type);
2144:   return(0);
2145: }
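
/* Illustrative sketch (not part of the original taosolver.c): the usual creation sequence
   around TaoSetType().  The vector x, the callback FormFunctionGradient, and the user
   context are assumed to be provided by the application; error checking is omitted.
.vb
   Tao tao;
   TaoCreate(PETSC_COMM_WORLD,&tao);
   TaoSetType(tao,TAOLMVM);                                            // programmatic default
   TaoSetInitialVector(tao,x);                                         // initial guess / solution vector
   TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);  // application callback
   TaoSetFromOptions(tao);                                             // -tao_type <type> may override TAOLMVM
   TaoSolve(tao);
   TaoDestroy(&tao);
.ve
*/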

2147: /*MC
2148:    TaoRegister - Adds a method to the TAO package for unconstrained minimization.

2150:    Synopsis:
2151:    PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))

2153:    Not Collective

2155:    Input Parameters:
2156: +  sname - name of a new user-defined solver
2157: -  func - routine to Create method context

2159:    Notes:
2160:    TaoRegister() may be called multiple times to add several user-defined solvers.

2162:    Sample usage:
2163: .vb
2164:    TaoRegister("my_solver",MySolverCreate);
2165: .ve

2167:    Then, your solver can be chosen with the procedural interface via
2168: $     TaoSetType(tao,"my_solver")
2169:    or at runtime via the option
2170: $     -tao_type my_solver

2172:    Level: advanced

2174: .seealso: TaoRegisterAll(), TaoRegisterDestroy()
2175: M*/
2176: PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2177: {

2181:   TaoInitializePackage();
2182:   PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);
2183:   return(0);
2184: }
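
/* Illustrative sketch (not part of the original taosolver.c): registering a minimal
   user-defined solver.  The creation routine fills in the private ops table, so it needs
   petsc/private/taoimpl.h; TaoSolve_MySolver is a do-nothing stub shown only to make the
   registration pattern concrete, and all names here are hypothetical.
.vb
   static PetscErrorCode TaoSolve_MySolver(Tao tao)
   {
     // a real solver would iterate here, calling TaoMonitor() and the convergence test
     tao->reason = TAO_CONVERGED_USER;
     return 0;
   }

   static PetscErrorCode TaoCreate_MySolver(Tao tao)
   {
     tao->ops->solve = TaoSolve_MySolver;
     return 0;
   }

   // after PetscInitialize():
   TaoRegister("my_solver",TaoCreate_MySolver);
   // now selectable with TaoSetType(tao,"my_solver") or -tao_type my_solver
.ve
*/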

2186: /*@C
2187:    TaoRegisterDestroy - Frees the list of minimization solvers that were
2188:    registered by TaoRegister().

2190:    Not Collective

2192:    Level: advanced

2194: .seealso: TaoRegisterAll(), TaoRegister()
2195: @*/
2196: PetscErrorCode TaoRegisterDestroy(void)
2197: {
2200:   PetscFunctionListDestroy(&TaoList);
2201:   TaoRegisterAllCalled = PETSC_FALSE;
2202:   return(0);
2203: }

2205: /*@
2206:    TaoGetIterationNumber - Gets the number of Tao iterations completed
2207:    at this time.

2209:    Not Collective

2211:    Input Parameter:
2212: .  tao - Tao context

2214:    Output Parameter:
2215: .  iter - iteration number

2217:    Notes:
2218:    For example, during the computation of iteration 2 this would return 1.


2221:    Level: intermediate

2223: .seealso:   TaoGetLinearSolveIterations(), TaoGetResidualNorm(), TaoGetObjective()
2224: @*/
2225: PetscErrorCode  TaoGetIterationNumber(Tao tao,PetscInt *iter)
2226: {
2230:   *iter = tao->niter;
2231:   return(0);
2232: }

2234: /*@
2235:    TaoGetObjective - Gets the current value of the objective function
2236:    at this time.

2238:    Not Collective

2240:    Input Parameter:
2241: .  tao - Tao context

2243:    Output Parameter:
2244: .  value - the current value

2246:    Level: intermediate

2248: .seealso:   TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetResidualNorm()
2249: @*/
2250: PetscErrorCode  TaoGetObjective(Tao tao,PetscReal *value)
2251: {
2255:   *value = tao->fc;
2256:   return(0);
2257: }

2259: /*@
2260:    TaoGetResidualNorm - Gets the current value of the norm of the residual
2261:    at this time.

2263:    Not Collective

2265:    Input Parameter:
2266: .  tao - Tao context

2268:    Output Parameter:
2269: .  value - the current value

2271:    Level: intermediate

2273:    Developer Note: This is the 2-norm of the residual; we cannot use TaoGetGradientNorm() because that
2274:                    routine has a different meaning. For some reason Tao sometimes calls the gradient the residual.

2276: .seealso:   TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetObjective()
2277: @*/
2278: PetscErrorCode  TaoGetResidualNorm(Tao tao,PetscReal *value)
2279: {
2283:   *value = tao->residual;
2284:   return(0);
2285: }

2287: /*@
2288:    TaoSetIterationNumber - Sets the current iteration number.

2290:    Not Collective

2292:    Input Parameters:
2293: +  tao - Tao context
2294: -  iter - iteration number

2296:    Level: developer

2298: .seealso:   TaoGetLinearSolveIterations()
2299: @*/
2300: PetscErrorCode  TaoSetIterationNumber(Tao tao,PetscInt iter)
2301: {

2306:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2307:   tao->niter = iter;
2308:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2309:   return(0);
2310: }

2312: /*@
2313:    TaoGetTotalIterationNumber - Gets the total number of Tao iterations
2314:    completed. This number keeps accumulating if multiple solves
2315:    are called with the Tao object.

2317:    Not Collective

2319:    Input Parameter:
2320: .  tao - Tao context

2322:    Output Parameter:
2323: .  iter - iteration number

2325:    Notes:
2326:    The total iteration count is updated after each solve; if a TaoSolve() is
2327:    currently in progress, those iterations are not yet counted.

2329:    Level: intermediate

2331: .seealso:   TaoGetLinearSolveIterations()
2332: @*/
2333: PetscErrorCode  TaoGetTotalIterationNumber(Tao tao,PetscInt *iter)
2334: {
2338:   *iter = tao->ntotalits;
2339:   return(0);
2340: }

2342: /*@
2343:    TaoSetTotalIterationNumber - Sets the current total iteration number.

2345:    Not Collective

2347:    Input Parameters:
2348: +  tao - Tao context
2349: -  iter - iteration number

2351:    Level: developer

2353: .seealso:   TaoGetLinearSolveIterations()
2354: @*/
2355: PetscErrorCode  TaoSetTotalIterationNumber(Tao tao,PetscInt iter)
2356: {

2361:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2362:   tao->ntotalits = iter;
2363:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2364:   return(0);
2365: }

2367: /*@
2368:   TaoSetConvergedReason - Sets the termination flag on a Tao object

2370:   Logically Collective on Tao

2372:   Input Parameters:
2373: + tao - the Tao context
2374: - reason - one of
2375: $     TAO_CONVERGED_GATOL (3),
2376: $     TAO_CONVERGED_GRTOL (4),
2377: $     TAO_CONVERGED_GTTOL (5),
2378: $     TAO_CONVERGED_STEPTOL (6),
2379: $     TAO_CONVERGED_MINF (7),
2380: $     TAO_CONVERGED_USER (8),
2381: $     TAO_DIVERGED_MAXITS (-2),
2382: $     TAO_DIVERGED_NAN (-4),
2383: $     TAO_DIVERGED_MAXFCN (-5),
2384: $     TAO_DIVERGED_LS_FAILURE (-6),
2385: $     TAO_DIVERGED_TR_REDUCTION (-7),
2386: $     TAO_DIVERGED_USER (-8),
2387: $     TAO_CONTINUE_ITERATING (0)

2388:    Level: intermediate

2390: @*/
2391: PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason)
2392: {
2395:   tao->reason = reason;
2396:   return(0);
2397: }

2399: /*@
2400:    TaoGetConvergedReason - Gets the reason the Tao iteration was stopped.

2402:    Not Collective

2404:    Input Parameter:
2405: .  tao - the Tao solver context

2407:    Output Parameter:
2408: .  reason - one of
2409: $  TAO_CONVERGED_GATOL (3)           ||g(X)|| < gatol
2410: $  TAO_CONVERGED_GRTOL (4)           ||g(X)|| / f(X)  < grtol
2411: $  TAO_CONVERGED_GTTOL (5)           ||g(X)|| / ||g(X0)|| < gttol
2412: $  TAO_CONVERGED_STEPTOL (6)         step size small
2413: $  TAO_CONVERGED_MINF (7)            F < F_min
2414: $  TAO_CONVERGED_USER (8)            User defined
2415: $  TAO_DIVERGED_MAXITS (-2)          its > maxits
2416: $  TAO_DIVERGED_NAN (-4)             Numerical problems
2417: $  TAO_DIVERGED_MAXFCN (-5)          fevals > max_funcs
2418: $  TAO_DIVERGED_LS_FAILURE (-6)      line search failure
2419: $  TAO_DIVERGED_TR_REDUCTION (-7)    trust region failure
2420: $  TAO_DIVERGED_USER (-8)            user defined
2421: $  TAO_CONTINUE_ITERATING (0)

2423:    where
2424: +  X - current solution
2425: .  X0 - initial guess
2426: .  f(X) - current function value
2427: .  f(X*) - true solution (estimated)
2428: .  g(X) - current gradient
2429: .  its - current iterate number
2430: .  maxits - maximum number of iterates
2431: .  fevals - number of function evaluations
2432: -  max_funcs - maximum number of function evaluations

2434:    Level: intermediate

2436: .seealso: TaoSetConvergenceTest(), TaoSetTolerances()

2438: @*/
2439: PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason)
2440: {
2444:   *reason = tao->reason;
2445:   return(0);
2446: }
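
/* Illustrative sketch (not part of the original taosolver.c): inspecting the converged
   reason after a solve.  Negative values indicate divergence, positive values convergence;
   error checking is omitted.
.vb
   TaoConvergedReason reason;
   TaoSolve(tao);
   TaoGetConvergedReason(tao,&reason);
   if (reason < 0) {
     PetscPrintf(PETSC_COMM_WORLD,"Tao did not converge (reason %d)\n",(int)reason);
   }
.ve
*/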

2448: /*@
2449:   TaoGetSolutionStatus - Get the current iterate, objective value,
2450:   residual, infeasibility, and termination

2452:   Not Collective

2454:    Input Parameters:
2455: .  tao - the Tao context

2457:    Output Parameters:
2458: +  its - the current iterate number (>=0)
2459: .  f - the current function value
2460: .  gnorm - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.
2461: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2462: .  xdiff - the step length or trust region radius of the most recent iterate.
2463: -  reason - The termination reason, which can equal TAO_CONTINUE_ITERATING

2465:    Level: intermediate

2467:    Note:
2468:    TAO returns the values set by the solvers in the routine TaoMonitor().

2470:    Note:
2471:    If any of the output arguments are set to NULL, no corresponding value will be returned.

2473: .seealso: TaoMonitor(), TaoGetConvergedReason()
2474: @*/
2475: PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason)
2476: {
2478:   if (its) *its=tao->niter;
2479:   if (f) *f=tao->fc;
2480:   if (gnorm) *gnorm=tao->residual;
2481:   if (cnorm) *cnorm=tao->cnorm;
2482:   if (reason) *reason=tao->reason;
2483:   if (xdiff) *xdiff=tao->step;
2484:   return(0);
2485: }
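
/* Illustrative sketch (not part of the original taosolver.c): a custom monitor built on
   TaoGetSolutionStatus().  MyMonitor is a hypothetical name; it would be attached with
   TaoSetMonitor(tao,MyMonitor,NULL,NULL).  Error checking is omitted.
.vb
   static PetscErrorCode MyMonitor(Tao tao,void *ctx)
   {
     PetscInt  its;
     PetscReal f,gnorm,cnorm,xdiff;

     TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,NULL);
     return PetscPrintf(PetscObjectComm((PetscObject)tao),"%D: f=%g  ||g||=%g  step=%g\n",its,(double)f,(double)gnorm,(double)xdiff);
   }
.ve
*/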

2487: /*@C
2488:    TaoGetType - Gets the current Tao algorithm.

2490:    Not Collective

2492:    Input Parameter:
2493: .  tao - the Tao solver context

2495:    Output Parameter:
2496: .  type - Tao method

2498:    Level: intermediate

2500: @*/
2501: PetscErrorCode TaoGetType(Tao tao,TaoType *type)
2502: {
2506:   *type=((PetscObject)tao)->type_name;
2507:   return(0);
2508: }

2510: /*@C
2511:   TaoMonitor - Monitor the solver and the current solution.  This
2512:   routine records the iteration number and residual statistics and
2513:   calls any monitors specified by the user.

2515:    Input Parameters:
2516: +  tao - the Tao context
2517: .  its - the current iterate number (>=0)
2518: .  f - the current objective function value
2519: .  res - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.  This measure will be recorded and
2520:           used for some termination tests.
2521: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2522: -  steplength - multiple of the step direction added to the previous iterate.

2524:    Note:
2525:    The termination reason is not returned by this routine; it can be obtained afterwards with TaoGetConvergedReason().

2527:    Options Database Key:
2528: .  -tao_monitor - Use the default monitor, which prints statistics to standard output

2530: .seealso: TaoGetConvergedReason(), TaoMonitorDefault(), TaoSetMonitor()

2532:    Level: developer

2534: @*/
2535: PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength)
2536: {
2538:   PetscInt       i;

2542:   tao->fc = f;
2543:   tao->residual = res;
2544:   tao->cnorm = cnorm;
2545:   tao->step = steplength;
2546:   if (!its) {
2547:     tao->cnorm0 = cnorm; tao->gnorm0 = res;
2548:   }
2549:   if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
2550:   for (i=0;i<tao->numbermonitors;i++) {
2551:     (*tao->monitor[i])(tao,tao->monitorcontext[i]);
2552:   }
2553:   return(0);
2554: }

2556: /*@
2557:    TaoSetConvergenceHistory - Sets the array used to hold the convergence history.

2559:    Logically Collective on Tao

2561:    Input Parameters:
2562: +  tao - the Tao solver context
2563: .  obj   - array to hold objective value history
2564: .  resid - array to hold residual history
2565: .  cnorm - array to hold constraint violation history
2566: .  lits - integer array holds the number of linear iterations for each Tao iteration
2567: .  na  - size of obj, resid, and cnorm
2568: -  reset - PETSC_TRUE indicates each new minimization resets the history counter to zero,
2569:            else it continues storing new values for new minimizations after the old ones

2571:    Notes:
2572:    If set, TAO will fill the given arrays with the indicated
2573:    information at each iteration.  If 'obj','resid','cnorm','lits' are
2574:    *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or
2575:    PETSC_DEFAULT) is allocated for the history.
2576:    If not all are NULL, then only the non-NULL information categories
2577:    will be stored; the others will be ignored.

2579:    Any convergence information after iteration number 'na' will not be stored.

2581:    This routine is useful, e.g., when running a code for purposes
2582:    of accurate performance monitoring, when no I/O should be done
2583:    during the section of code that is being timed.

2585:    Level: intermediate

2587: .seealso: TaoGetConvergenceHistory()

2589: @*/
2590: PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal obj[], PetscReal resid[], PetscReal cnorm[], PetscInt lits[], PetscInt na,PetscBool reset)
2591: {


2601:   if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000;
2602:   if (!obj && !resid && !cnorm && !lits) {
2603:     PetscCalloc4(na,&obj,na,&resid,na,&cnorm,na,&lits);
2604:     tao->hist_malloc = PETSC_TRUE;
2605:   }

2607:   tao->hist_obj = obj;
2608:   tao->hist_resid = resid;
2609:   tao->hist_cnorm = cnorm;
2610:   tao->hist_lits = lits;
2611:   tao->hist_max   = na;
2612:   tao->hist_reset = reset;
2613:   tao->hist_len = 0;
2614:   return(0);
2615: }

2617: /*@C
2618:    TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history.

2620:    Collective on Tao

2622:    Input Parameter:
2623: .  tao - the Tao context

2625:    Output Parameters:
2626: +  obj   - array used to hold objective value history
2627: .  resid - array used to hold residual history
2628: .  cnorm - array used to hold constraint violation history
2629: .  lits  - integer array used to hold linear solver iteration count
2630: -  nhist  - size of obj, resid, cnorm, and lits

2632:    Notes:
2633:     This routine must be preceded by calls to TaoSetConvergenceHistory()
2634:     and TaoSolve(), otherwise it returns useless information.

2636:     The calling sequence for this routine in Fortran is
2637: $   call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr)

2639:    This routine is useful, e.g., when running a code for purposes
2640:    of accurate performance monitoring, when no I/O should be done
2641:    during the section of code that is being timed.

2643:    Level: advanced

2645: .seealso: TaoSetConvergenceHistory()

2647: @*/
2648: PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist)
2649: {
2652:   if (obj)   *obj   = tao->hist_obj;
2653:   if (cnorm) *cnorm = tao->hist_cnorm;
2654:   if (resid) *resid = tao->hist_resid;
2655:   if (nhist) *nhist = tao->hist_len;
2656:   return(0);
2657: }
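
/* Illustrative sketch (not part of the original taosolver.c): recording and printing a
   convergence history.  The arrays are user-owned here, so Tao does not allocate them;
   error checking is omitted.
.vb
   PetscReal obj[100],resid[100],cnorm[100];
   PetscInt  lits[100],nhist,i;

   TaoSetConvergenceHistory(tao,obj,resid,cnorm,lits,100,PETSC_TRUE);
   TaoSolve(tao);
   TaoGetConvergenceHistory(tao,NULL,NULL,NULL,NULL,&nhist);
   for (i=0; i<nhist; i++) {
     PetscPrintf(PETSC_COMM_WORLD,"%D: f=%g  ||g||=%g\n",i,(double)obj[i],(double)resid[i]);
   }
.ve
*/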

2659: /*@
2660:    TaoSetApplicationContext - Sets the optional user-defined context for
2661:    a solver.

2663:    Logically Collective on Tao

2665:    Input Parameters:
2666: +  tao  - the Tao context
2667: -  usrP - optional user context

2669:    Level: intermediate

2671: .seealso: TaoGetApplicationContext()
2672: @*/
2673: PetscErrorCode  TaoSetApplicationContext(Tao tao,void *usrP)
2674: {
2677:   tao->user = usrP;
2678:   return(0);
2679: }

2681: /*@
2682:    TaoGetApplicationContext - Gets the user-defined context for a
2683:    TAO solver.

2685:    Not Collective

2687:    Input Parameter:
2688: .  tao  - Tao context

2690:    Output Parameter:
2691: .  usrP - user context

2693:    Level: intermediate

2695: .seealso: TaoSetApplicationContext()
2696: @*/
2697: PetscErrorCode  TaoGetApplicationContext(Tao tao,void *usrP)
2698: {
2701:   *(void**)usrP = tao->user;
2702:   return(0);
2703: }
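
/* Illustrative sketch (not part of the original taosolver.c): stashing application data
   on the Tao object and recovering it inside a callback.  AppCtx is a hypothetical
   application type; error checking is omitted.
.vb
   typedef struct { PetscReal alpha; } AppCtx;

   // in main():
   AppCtx user = {100.0};
   TaoSetApplicationContext(tao,&user);

   // inside a callback that was handed only the Tao object:
   AppCtx *ctx;
   TaoGetApplicationContext(tao,&ctx);   // note: pass the address of the pointer
   // ... use ctx->alpha ...
.ve
*/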

2705: /*@
2706:    TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient.

2708:    Collective on tao

2710:    Input Parameters:
2711: +  tao  - the Tao context
2712: -  M    - matrix that defines the norm

2714:    Level: beginner

2716: .seealso: TaoGetGradientNorm(), TaoGradientNorm()
2717: @*/
2718: PetscErrorCode  TaoSetGradientNorm(Tao tao, Mat M)
2719: {


2725:   if (tao->gradient_norm) {
2726:     PetscObjectDereference((PetscObject)tao->gradient_norm);
2727:     VecDestroy(&tao->gradient_norm_tmp);
2728:   }

2730:   PetscObjectReference((PetscObject)M);
2731:   tao->gradient_norm = M;
2732:   MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);
2733:   return(0);
2734: }

2736: /*@
2737:    TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient.

2739:    Not Collective

2741:    Input Parameter:
2742: .  tao  - Tao context

2744:    Output Parameter:
2745: .  M - the matrix that defines the norm

2747:    Level: beginner

2749: .seealso: TaoSetGradientNorm(), TaoGradientNorm()
2750: @*/
2751: PetscErrorCode  TaoGetGradientNorm(Tao tao, Mat *M)
2752: {
2755:   *M = tao->gradient_norm;
2756:   return(0);
2757: }

2759: /*@C
2760:    TaoGradientNorm - Compute the norm with respect to the inner product the user has set.

2762:    Collective on tao

2764:    Input Parameters:
2765: +  tao      - the Tao context
2766: .  gradient - the gradient to be computed
2767: -  type     - the norm type

2769:    Output Parameter:
2770: .  gnorm    - the gradient norm

2772:    Level: developer

2774: .seealso: TaoSetGradientNorm(), TaoGetGradientNorm()
2775: @*/
2776: PetscErrorCode  TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
2777: {


2783:   if (tao->gradient_norm) {
2784:     PetscScalar gnorms;

2786:     if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONGSTATE, "Norm type must be NORM_2 if an inner product for the gradient norm is set.");
2787:     MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);
2788:     VecDot(gradient, tao->gradient_norm_tmp, &gnorms);
2789:     *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
2790:   } else {
2791:     VecNorm(gradient, type, gnorm);
2792:   }
2793:   return(0);
2794: }
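
/* Illustrative sketch (not part of the original taosolver.c): measuring the gradient in a
   matrix-induced inner product, ||g||_M = sqrt(g^T M g).  M is assumed to be a symmetric
   positive-definite matrix (e.g. a mass matrix) and g a compatible gradient vector, both
   owned by the application; error checking is omitted.
.vb
   PetscReal gnorm;

   TaoSetGradientNorm(tao,M);              // tao now measures gradients in the M-inner product
   TaoGradientNorm(tao,g,NORM_2,&gnorm);   // gnorm = sqrt(g^T M g)
.ve
*/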

2796: /*@C
2797:    TaoMonitorDrawCtxCreate - Creates the monitor context used by the drawing monitors such as TaoDrawSolutionMonitor()

2799:    Collective on Tao

2801:    Output Parameter:
2802: .    ctx - the monitor context

2804:    Options Database:
2805: .   -tao_draw_solution_initial - show initial guess as well as current solution

2807:    Level: intermediate

2809: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoDrawSolutionMonitor()
2810: @*/
2811: PetscErrorCode  TaoMonitorDrawCtxCreate(MPI_Comm comm,const char host[],const char label[],int x,int y,int m,int n,PetscInt howoften,TaoMonitorDrawCtx *ctx)
2812: {
2813:   PetscErrorCode   ierr;

2816:   PetscNew(ctx);
2817:   PetscViewerDrawOpen(comm,host,label,x,y,m,n,&(*ctx)->viewer);
2818:   PetscViewerSetFromOptions((*ctx)->viewer);
2819:   (*ctx)->howoften = howoften;
2820:   return(0);
2821: }

2823: /*@C
2824:    TaoMonitorDrawCtxDestroy - Destroys the monitor context for TaoDrawSolutionMonitor()

2826:    Collective on Tao

2828:    Input Parameters:
2829: .    ctx - the monitor context

2831:    Level: intermediate

2833: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoDrawSolutionMonitor()
2834: @*/
2835: PetscErrorCode  TaoMonitorDrawCtxDestroy(TaoMonitorDrawCtx *ictx)
2836: {

2840:   PetscViewerDestroy(&(*ictx)->viewer);
2841:   PetscFree(*ictx);
2842:   return(0);
2843: }
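
/* Illustrative sketch (not part of the original taosolver.c): creating a draw context and
   attaching TaoDrawSolutionMonitor() by hand, which mirrors what the -tao_draw_solution
   option does.  The window label is hypothetical; error checking is omitted.
.vb
   TaoMonitorDrawCtx ctx;

   TaoMonitorDrawCtxCreate(PETSC_COMM_WORLD,NULL,"solution",PETSC_DECIDE,PETSC_DECIDE,300,300,1,&ctx);
   TaoSetMonitor(tao,TaoDrawSolutionMonitor,ctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
   TaoSolve(tao);   // with howoften = 1 the solution is plotted every iteration
.ve
*/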