Actual source code: taosolver.c

petsc-3.7.3 2016-08-01
  1: #define TAO_DLL

  3: #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/

  5: PetscBool TaoRegisterAllCalled = PETSC_FALSE;
  6: PetscFunctionList TaoList = NULL;

  8: PetscClassId TAO_CLASSID;
  9: PetscLogEvent Tao_Solve, Tao_ObjectiveEval, Tao_GradientEval, Tao_ObjGradientEval, Tao_HessianEval, Tao_ConstraintsEval, Tao_JacobianEval;

 11: const char *TaoSubSetTypes[] = {  "subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",0};

 15: /*@
 16:   TaoCreate - Creates a TAO solver

 18:   Collective on MPI_Comm

 20:   Input Parameter:
 21: . comm - MPI communicator

 23:   Output Parameter:
 24: . newtao - the new Tao context

 26:   Available methods include:
 27: +    nls - Newton's method with line search for unconstrained minimization
 28: .    ntr - Newton's method with trust region for unconstrained minimization
 29: .    ntl - Newton's method with trust region, line search for unconstrained minimization
 30: .    lmvm - Limited memory variable metric method for unconstrained minimization
 31: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
 32: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
 33: .    tron - Newton Trust Region method for bound constrained minimization
 34: .    gpcg - Newton Trust Region method for quadratic bound constrained minimization
 35: .    blmvm - Limited memory variable metric method for bound constrained minimization
 36: .    lcl - Linearly constrained Lagrangian method for pde-constrained minimization
 37: -    pounders - Model-based algorithm for nonlinear least squares

 39:    Options Database Keys:
 40: .   -tao_type - select which method TAO should use
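
   Example Usage:
   A minimal sketch of creating a solver, choosing one of the methods above, and cleaning up
   (error checking omitted; the callback-setting calls are only indicated in a comment):
.vb
      Tao tao;

      TaoCreate(PETSC_COMM_WORLD,&tao);
      TaoSetType(tao,TAOLMVM);        /* any method from the list above */
      TaoSetFromOptions(tao);         /* let -tao_type override the choice */
      /* ... TaoSetInitialVector(), TaoSetObjectiveRoutine(), etc., then TaoSolve() ... */
      TaoDestroy(&tao);
.ve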

 42:    Level: beginner

 44: .seealso: TaoSolve(), TaoDestroy()
 45: @*/
 46: PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
 47: {
 49:   Tao            tao;

 53:   *newtao = NULL;

 55:   TaoInitializePackage();
 56:   TaoLineSearchInitializePackage();

 58:   PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);
 59:   tao->ops->computeobjective=0;
 60:   tao->ops->computeobjectiveandgradient=0;
 61:   tao->ops->computegradient=0;
 62:   tao->ops->computehessian=0;
 63:   tao->ops->computeseparableobjective=0;
 64:   tao->ops->computeconstraints=0;
 65:   tao->ops->computejacobian=0;
 66:   tao->ops->computejacobianequality=0;
 67:   tao->ops->computejacobianinequality=0;
 68:   tao->ops->computeequalityconstraints=0;
 69:   tao->ops->computeinequalityconstraints=0;
 70:   tao->ops->convergencetest=TaoDefaultConvergenceTest;
 71:   tao->ops->convergencedestroy=0;
 72:   tao->ops->computedual=0;
 73:   tao->ops->setup=0;
 74:   tao->ops->solve=0;
 75:   tao->ops->view=0;
 76:   tao->ops->setfromoptions=0;
 77:   tao->ops->destroy=0;

 79:   tao->solution=NULL;
 80:   tao->gradient=NULL;
 81:   tao->sep_objective = NULL;
 82:   tao->constraints=NULL;
 83:   tao->constraints_equality=NULL;
 84:   tao->constraints_inequality=NULL;
 85:   tao->sep_weights_v=NULL;
 86:   tao->sep_weights_w=NULL;
 87:   tao->stepdirection=NULL;
 88:   tao->niter=0;
 89:   tao->ntotalits=0;
 90:   tao->XL = NULL;
 91:   tao->XU = NULL;
 92:   tao->IL = NULL;
 93:   tao->IU = NULL;
 94:   tao->DI = NULL;
 95:   tao->DE = NULL;
 96:   tao->gradient_norm = NULL;
 97:   tao->gradient_norm_tmp = NULL;
 98:   tao->hessian = NULL;
 99:   tao->hessian_pre = NULL;
100:   tao->jacobian = NULL;
101:   tao->jacobian_pre = NULL;
102:   tao->jacobian_state = NULL;
103:   tao->jacobian_state_pre = NULL;
104:   tao->jacobian_state_inv = NULL;
105:   tao->jacobian_design = NULL;
106:   tao->jacobian_design_pre = NULL;
107:   tao->jacobian_equality = NULL;
108:   tao->jacobian_equality_pre = NULL;
109:   tao->jacobian_inequality = NULL;
110:   tao->jacobian_inequality_pre = NULL;
111:   tao->state_is = NULL;
112:   tao->design_is = NULL;

114:   tao->max_it     = 10000;
115:   tao->max_funcs   = 10000;
116: #if defined(PETSC_USE_REAL_SINGLE)
117:   tao->gatol       = 1e-5;
118:   tao->grtol       = 1e-5;
119: #else
120:   tao->gatol       = 1e-8;
121:   tao->grtol       = 1e-8;
122: #endif
123:   tao->crtol       = 0.0;
124:   tao->catol       = 0.0;
125:   tao->steptol     = 0.0;
126:   tao->gttol       = 0.0;
127:   tao->trust0      = PETSC_INFINITY;
128:   tao->fmin        = PETSC_NINFINITY;
129:   tao->hist_malloc = PETSC_FALSE;
130:   tao->hist_reset = PETSC_TRUE;
131:   tao->hist_max = 0;
132:   tao->hist_len = 0;
133:   tao->hist_obj = NULL;
134:   tao->hist_resid = NULL;
135:   tao->hist_cnorm = NULL;
136:   tao->hist_lits = NULL;

138:   tao->numbermonitors=0;
139:   tao->viewsolution=PETSC_FALSE;
140:   tao->viewhessian=PETSC_FALSE;
141:   tao->viewgradient=PETSC_FALSE;
142:   tao->viewjacobian=PETSC_FALSE;
143:   tao->viewconstraints = PETSC_FALSE;

145:   /* These flags prevent algorithms from overriding user options */
146:   tao->max_it_changed   =PETSC_FALSE;
147:   tao->max_funcs_changed=PETSC_FALSE;
148:   tao->gatol_changed    =PETSC_FALSE;
149:   tao->grtol_changed    =PETSC_FALSE;
150:   tao->gttol_changed    =PETSC_FALSE;
151:   tao->steptol_changed  =PETSC_FALSE;
152:   tao->trust0_changed   =PETSC_FALSE;
153:   tao->fmin_changed     =PETSC_FALSE;
154:   tao->catol_changed    =PETSC_FALSE;
155:   tao->crtol_changed    =PETSC_FALSE;
156:   TaoResetStatistics(tao);
157:   *newtao = tao;
158:   return(0);
159: }

163: /*@
164:   TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u

166:   Collective on Tao

168:   Input Parameters:
169: . tao - the Tao context

171:   Notes:
172:   The user must set up the Tao with calls to TaoSetInitialVector(),
173:   TaoSetObjectiveRoutine(),
174:   TaoSetGradientRoutine(), and (if using 2nd order method) TaoSetHessianRoutine().
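
   A typical calling sequence is sketched below (FormFunctionGradient stands for an
   application-provided callback and x for an application-created Vec; error checking omitted):
.vb
      Tao                tao;
      TaoConvergedReason reason;

      TaoCreate(PETSC_COMM_WORLD,&tao);
      TaoSetType(tao,TAOLMVM);
      TaoSetInitialVector(tao,x);
      TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,NULL);
      TaoSetFromOptions(tao);
      TaoSolve(tao);
      TaoGetConvergedReason(tao,&reason);
      TaoDestroy(&tao);
.ve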

176:   Level: beginner

178: .seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine()
179:  @*/
180: PetscErrorCode TaoSolve(Tao tao)
181: {
182:   PetscErrorCode   ierr;
183:   static PetscBool set = PETSC_FALSE;

187:   PetscCitationsRegister("@TechReport{tao-user-ref,\n"
188:                                 "title   = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
189:                                 "author  = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
190:                                 "Institution = {Argonne National Laboratory},\n"
191:                                 "Year   = 2014,\n"
192:                                 "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
193:                                 "url    = {http://www.mcs.anl.gov/tao}\n}\n",&set);

195:   TaoSetUp(tao);
196:   TaoResetStatistics(tao);
197:   if (tao->linesearch) {
198:     TaoLineSearchReset(tao->linesearch);
199:   }

201:   PetscLogEventBegin(Tao_Solve,tao,0,0,0);
202:   if (tao->ops->solve){ (*tao->ops->solve)(tao); }
203:   PetscLogEventEnd(Tao_Solve,tao,0,0,0);

205:   tao->ntotalits += tao->niter;
206:   TaoViewFromOptions(tao,NULL,"-tao_view");

208:   if (tao->printreason) {
209:     if (tao->reason > 0) {
210:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
211:     } else {
212:       PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
213:     }
214:   }
215:   return(0);
216: }

220: /*@
221:   TaoSetUp - Sets up the internal data structures for the later use
222:   of a Tao solver

224:   Collective on tao

226:   Input Parameters:
227: . tao - the TAO context

229:   Notes:
230:   The user will not need to explicitly call TaoSetUp(), as it will
231:   automatically be called in TaoSolve().  However, if the user
232:   desires to call it explicitly, it should come after TaoCreate()
233:   and any TaoSetSomething() routines, but before TaoSolve().
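
   For example, an explicit call would be ordered as in the following sketch
   (x is an application-created Vec; error checking omitted):
.vb
      TaoCreate(PETSC_COMM_WORLD,&tao);
      TaoSetInitialVector(tao,x);
      /* ... any other TaoSetXXX() calls ... */
      TaoSetUp(tao);
      TaoSolve(tao);
      TaoDestroy(&tao);
.ve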

235:   Level: advanced

237: .seealso: TaoCreate(), TaoSolve()
238: @*/
239: PetscErrorCode TaoSetUp(Tao tao)
240: {

245:   if (tao->setupcalled) return(0);

247:   if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector");
248:   if (tao->ops->setup) {
249:     (*tao->ops->setup)(tao);
250:   }
251:   tao->setupcalled = PETSC_TRUE;
252:   return(0);
253: }

257: /*@
258:   TaoDestroy - Destroys the TAO context that was created with
259:   TaoCreate()

261:   Collective on Tao

263:   Input Parameter:
264: . tao - the Tao context

266:   Level: beginner

268: .seealso: TaoCreate(), TaoSolve()
269: @*/
270: PetscErrorCode TaoDestroy(Tao *tao)
271: {

275:   if (!*tao) return(0);
277:   if (--((PetscObject)*tao)->refct > 0) {*tao=0;return(0);}

279:   if ((*tao)->ops->destroy) {
280:     (*((*tao))->ops->destroy)(*tao);
281:   }
282:   KSPDestroy(&(*tao)->ksp);
283:   TaoLineSearchDestroy(&(*tao)->linesearch);

285:   if ((*tao)->ops->convergencedestroy) {
286:     (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);
287:     if ((*tao)->jacobian_state_inv) {
288:       MatDestroy(&(*tao)->jacobian_state_inv);
289:     }
290:   }
291:   VecDestroy(&(*tao)->solution);
292:   VecDestroy(&(*tao)->gradient);

294:   if ((*tao)->gradient_norm) {
295:     PetscObjectDereference((PetscObject)(*tao)->gradient_norm);
296:     VecDestroy(&(*tao)->gradient_norm_tmp);
297:   }

299:   VecDestroy(&(*tao)->XL);
300:   VecDestroy(&(*tao)->XU);
301:   VecDestroy(&(*tao)->IL);
302:   VecDestroy(&(*tao)->IU);
303:   VecDestroy(&(*tao)->DE);
304:   VecDestroy(&(*tao)->DI);
305:   VecDestroy(&(*tao)->constraints_equality);
306:   VecDestroy(&(*tao)->constraints_inequality);
307:   VecDestroy(&(*tao)->stepdirection);
308:   MatDestroy(&(*tao)->hessian_pre);
309:   MatDestroy(&(*tao)->hessian);
310:   MatDestroy(&(*tao)->jacobian_pre);
311:   MatDestroy(&(*tao)->jacobian);
312:   MatDestroy(&(*tao)->jacobian_state_pre);
313:   MatDestroy(&(*tao)->jacobian_state);
314:   MatDestroy(&(*tao)->jacobian_state_inv);
315:   MatDestroy(&(*tao)->jacobian_design);
316:   MatDestroy(&(*tao)->jacobian_equality);
317:   MatDestroy(&(*tao)->jacobian_equality_pre);
318:   MatDestroy(&(*tao)->jacobian_inequality);
319:   MatDestroy(&(*tao)->jacobian_inequality_pre);
320:   ISDestroy(&(*tao)->state_is);
321:   ISDestroy(&(*tao)->design_is);
322:   VecDestroy(&(*tao)->sep_weights_v);
323:   TaoCancelMonitors(*tao);
324:   if ((*tao)->hist_malloc) {
325:     PetscFree((*tao)->hist_obj);
326:     PetscFree((*tao)->hist_resid);
327:     PetscFree((*tao)->hist_cnorm);
328:     PetscFree((*tao)->hist_lits);
329:   }
330:   if ((*tao)->sep_weights_n) {
331:     PetscFree((*tao)->sep_weights_rows);
332:     PetscFree((*tao)->sep_weights_cols);
333:     PetscFree((*tao)->sep_weights_w);
334:   }
335:   PetscHeaderDestroy(tao);
336:   return(0);
337: }

341: /*@
342:   TaoSetFromOptions - Sets various Tao parameters from user
343:   options.

345:   Collective on Tao

347:   Input Parameter:
348: . tao - the Tao solver context

350:   Options Database Keys:
351: + -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.)
352: . -tao_gatol <gatol> - absolute error tolerance for ||gradient||
353: . -tao_grtol <grtol> - relative error tolerance for ||gradient||
354: . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
355: . -tao_max_it <max> - sets maximum number of iterations
356: . -tao_max_funcs <max> - sets maximum number of function evaluations
357: . -tao_fmin <fmin> - stop if function value reaches fmin
358: . -tao_steptol <tol> - stop if trust region radius less than <tol>
359: . -tao_trust0 <t> - initial trust region radius
360: . -tao_monitor - prints function value and residual at each iteration
361: . -tao_smonitor - same as tao_monitor, but truncates very small values
362: . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
363: . -tao_view_solution - prints solution vector at each iteration
364: . -tao_view_separableobjective - prints separable objective vector at each iteration
365: . -tao_view_stepdirection - prints step direction vector at each iteration
366: . -tao_view_gradient - prints gradient vector at each iteration
367: . -tao_draw_solution - graphically view solution vector at each iteration
368: . -tao_draw_step - graphically view step vector at each iteration
369: . -tao_draw_gradient - graphically view gradient at each iteration
370: . -tao_fd_gradient - use gradient computed with finite differences
371: . -tao_cancelmonitors - cancels all monitors (except those set with command line)
372: . -tao_view - prints information about the Tao after solving
373: - -tao_converged_reason - prints the reason TAO stopped iterating

375:   Notes:
376:   To see all options, run your program with the -help option or consult the
377:   user's manual. Should be called after TaoCreate() but before TaoSolve()

379:   Level: beginner
380: @*/
381: PetscErrorCode TaoSetFromOptions(Tao tao)
382: {
384:   const TaoType  default_type = TAOLMVM;
385:   char           type[256], monfilename[PETSC_MAX_PATH_LEN];
386:   PetscViewer    monviewer;
387:   PetscBool      flg;
388:   MPI_Comm       comm;

392:   PetscObjectGetComm((PetscObject)tao,&comm);

394:   /* So no warnings are given about unused options */
395:   PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);

397:   PetscObjectOptionsBegin((PetscObject)tao);
398:   {
399:     TaoRegisterAll();
400:     if (((PetscObject)tao)->type_name) {
401:       default_type = ((PetscObject)tao)->type_name;
402:     }
403:     /* Check for type from options */
404:     PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);
405:     if (flg) {
406:       TaoSetType(tao,type);
407:     } else if (!((PetscObject)tao)->type_name) {
408:       TaoSetType(tao,default_type);
409:     }

411:     PetscOptionsReal("-tao_catol","Stop if constraints violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);
412:     if (flg) tao->catol_changed=PETSC_TRUE;
413:     PetscOptionsReal("-tao_crtol","Stop if relative contraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);
414:     if (flg) tao->crtol_changed=PETSC_TRUE;
415:     PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);
416:     if (flg) tao->gatol_changed=PETSC_TRUE;
417:     PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);
418:     if (flg) tao->grtol_changed=PETSC_TRUE;
419:     PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);
420:     if (flg) tao->gttol_changed=PETSC_TRUE;
421:     PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);
422:     if (flg) tao->max_it_changed=PETSC_TRUE;
423:     PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);
424:     if (flg) tao->max_funcs_changed=PETSC_TRUE;
425:     PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);
426:     if (flg) tao->fmin_changed=PETSC_TRUE;
427:     PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);
428:     if (flg) tao->steptol_changed=PETSC_TRUE;
429:     PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);
430:     if (flg) tao->trust0_changed=PETSC_TRUE;
431:     PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
432:     if (flg) {
433:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
434:       TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
435:     }

437:     PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);
438:     PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
439:     if (flg) {
440:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
441:       TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
442:     }

444:     PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
445:     if (flg) {
446:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
447:       TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
448:     }

450:     PetscOptionsString("-tao_view_separableobjective","view separable objective vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
451:     if (flg) {
452:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
453:       TaoSetMonitor(tao,TaoSeparableObjectiveMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
454:     }

456:     PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
457:     if (flg) {
458:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
459:       TaoSetMonitor(tao,TaoDefaultMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
460:     }

462:     PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
463:     if (flg) {
464:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
465:       TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
466:     }

468:     PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
469:     if (flg) {
470:       PetscViewerASCIIOpen(comm,monfilename,&monviewer);
471:       TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
472:     }


475:     flg = PETSC_FALSE;
476:     PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);
477:     if (flg) {TaoCancelMonitors(tao);}

479:     flg = PETSC_FALSE;
480:     PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);
481:     if (flg) {
482:       TaoSetMonitor(tao,TaoDrawSolutionMonitor,NULL,NULL);
483:     }

485:     flg = PETSC_FALSE;
486:     PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);
487:     if (flg) {
488:       TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);
489:     }

491:     flg = PETSC_FALSE;
492:     PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);
493:     if (flg) {
494:       TaoSetMonitor(tao,TaoDrawGradientMonitor,NULL,NULL);
495:     }
496:     flg = PETSC_FALSE;
497:     PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);
498:     if (flg) {
499:       TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);
500:     }
501:     PetscOptionsEnum("-tao_subset_type","subset type", "", TaoSubSetTypes,(PetscEnum)tao->subset_type, (PetscEnum*)&tao->subset_type, 0);

503:     if (tao->ops->setfromoptions) {
504:       (*tao->ops->setfromoptions)(PetscOptionsObject,tao);
505:     }
506:   }
507:   PetscOptionsEnd();
508:   return(0);
509: }

513: /*@C
514:   TaoView - Prints information about the Tao

516:   Collective on Tao

518:   Input Parameters:
519: + tao - the Tao context
520: - viewer - visualization context

522:   Options Database Key:
523: . -tao_view - Calls TaoView() at the end of TaoSolve()

525:   Notes:
526:   The available visualization contexts include
527: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
528: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
529:          output where only the first processor opens
530:          the file.  All other processors send their
531:          data to the first processor to print.
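
   For example, a sketch of viewing the solver on the default viewer after a solve
   (equivalent to running with -tao_view; error checking omitted):
.vb
      TaoSolve(tao);
      TaoView(tao,PETSC_VIEWER_STDOUT_WORLD);
.ve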

533:   Level: beginner

535: .seealso: PetscViewerASCIIOpen()
536: @*/
537: PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
538: {
539:   PetscErrorCode      ierr;
540:   PetscBool           isascii,isstring;
541:   const TaoType type;

545:   if (!viewer) {
546:     PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);
547:   }

551:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
552:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
553:   if (isascii) {
554:     PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);
555:     PetscViewerASCIIPushTab(viewer);

557:     if (tao->ops->view) {
558:       PetscViewerASCIIPushTab(viewer);
559:       (*tao->ops->view)(tao,viewer);
560:       PetscViewerASCIIPopTab(viewer);
561:     }
562:     if (tao->linesearch) {
563:       PetscObjectPrintClassNamePrefixType((PetscObject)(tao->linesearch),viewer);
564:     }
565:     if (tao->ksp) {
566:       PetscObjectPrintClassNamePrefixType((PetscObject)(tao->ksp),viewer);
567:       PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);
568:     }
569:     if (tao->XL || tao->XU) {
570:       PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);
571:     }

573:     ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);
574:     ierr=PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);
575:     ierr=PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);

577:     PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);

579:     if (tao->cnorm>0 || tao->catol>0 || tao->crtol>0){
580:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances:");
581:       ierr=PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);
582:       ierr=PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);
583:       PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);
584:     }

586:     if (tao->trust < tao->steptol){
587:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);
588:       ierr=PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);
589:     }

591:     if (tao->fmin>-1.e25){
592:       ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);
593:     }
594:     PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);

596:     PetscViewerASCIIPrintf(viewer,"total number of iterations=%D,          ",tao->niter);
597:     PetscViewerASCIIPrintf(viewer,"              (max: %D)\n",tao->max_it);

599:     if (tao->nfuncs>0){
600:       PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);
601:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
602:     }
603:     if (tao->ngrads>0){
604:       PetscViewerASCIIPrintf(viewer,"total number of gradient evaluations=%D,",tao->ngrads);
605:       PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);
606:     }
607:     if (tao->nfuncgrads>0){
608:       PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);
609:       PetscViewerASCIIPrintf(viewer,"    (max: %D)\n",tao->max_funcs);
610:     }
611:     if (tao->nhess>0){
612:       PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);
613:     }
614:     /*  if (tao->linear_its>0){
615:      PetscViewerASCIIPrintf(viewer,"  total Krylov method iterations=%D\n",tao->linear_its);
616:      }*/
617:     if (tao->nconstraints>0){
618:       PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);
619:     }
620:     if (tao->njac>0){
621:       PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);
622:     }

624:     if (tao->reason>0){
625:       PetscViewerASCIIPrintf(viewer,    "Solution converged: ");
626:       switch (tao->reason) {
627:       case TAO_CONVERGED_GATOL:
628:         PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");
629:         break;
630:       case TAO_CONVERGED_GRTOL:
631:         PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");
632:         break;
633:       case TAO_CONVERGED_GTTOL:
634:         PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");
635:         break;
636:       case TAO_CONVERGED_STEPTOL:
637:         PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");
638:         break;
639:       case TAO_CONVERGED_MINF:
640:         PetscViewerASCIIPrintf(viewer," Minf --  f < fmin\n");
641:         break;
642:       case TAO_CONVERGED_USER:
643:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
644:         break;
645:       default:
646:         PetscViewerASCIIPrintf(viewer,"\n");
647:         break;
648:       }

650:     } else {
651:       PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);
652:       switch (tao->reason) {
653:       case TAO_DIVERGED_MAXITS:
654:         PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");
655:         break;
656:       case TAO_DIVERGED_NAN:
657:         PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");
658:         break;
659:       case TAO_DIVERGED_MAXFCN:
660:         PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");
661:         break;
662:       case TAO_DIVERGED_LS_FAILURE:
663:         PetscViewerASCIIPrintf(viewer," Line Search Failure\n");
664:         break;
665:       case TAO_DIVERGED_TR_REDUCTION:
666:         PetscViewerASCIIPrintf(viewer," Trust Region too small\n");
667:         break;
668:       case TAO_DIVERGED_USER:
669:         PetscViewerASCIIPrintf(viewer," User Terminated\n");
670:         break;
671:       default:
672:         PetscViewerASCIIPrintf(viewer,"\n");
673:         break;
674:       }
675:     }
676:     PetscViewerASCIIPopTab(viewer);
677:   } else if (isstring) {
678:     TaoGetType(tao,&type);
679:     PetscViewerStringSPrintf(viewer," %-3.3s",type);
680:   }
681:   return(0);
682: }

686: /*@
687:   TaoSetTolerances - Sets parameters used in TAO convergence tests

689:   Logically collective on Tao

691:   Input Parameters:
692: + tao - the Tao context
693: . gatol - stop if norm of gradient is less than this
694: . grtol - stop if relative norm of gradient is less than this
695: - gttol - stop if norm of gradient is reduced by this factor

697:   Options Database Keys:
698: + -tao_gatol <gatol> - Sets gatol
699: . -tao_grtol <grtol> - Sets grtol
700: - -tao_gttol <gttol> - Sets gttol

702:   Stopping Criteria:
703: $ ||g(X)||                            <= gatol
704: $ ||g(X)|| / |f(X)|                   <= grtol
705: $ ||g(X)|| / ||g(X0)||                <= gttol

707:   Notes:
708:   Use PETSC_DEFAULT to leave one or more tolerances unchanged.
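
   For example, a sketch that tightens only the absolute gradient tolerance
   (the value 1.e-10 is purely illustrative):
.vb
      TaoSetTolerances(tao,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT);
.ve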

710:   Level: beginner

712: .seealso: TaoGetTolerances()

714: @*/
715: PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
716: {


722:   if (gatol != PETSC_DEFAULT) {
723:     if (gatol<0) {
724:       PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");
725:     } else {
726:       tao->gatol = PetscMax(0,gatol);
727:       tao->gatol_changed=PETSC_TRUE;
728:     }
729:   }

731:   if (grtol != PETSC_DEFAULT) {
732:     if (grtol<0) {
733:       PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");
734:     } else {
735:       tao->grtol = PetscMax(0,grtol);
736:       tao->grtol_changed=PETSC_TRUE;
737:     }
738:   }

740:   if (gttol != PETSC_DEFAULT) {
741:     if (gttol<0) {
742:       PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");
743:     } else {
744:       tao->gttol = PetscMax(0,gttol);
745:       tao->gttol_changed=PETSC_TRUE;
746:     }
747:   }
748:   return(0);
749: }

753: /*@
754:   TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO  convergence tests

756:   Logically collective on Tao

758:   Input Parameters:
759: + tao - the Tao context
760: . catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
761: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

763:   Options Database Keys:
764: + -tao_catol <catol> - Sets catol
765: - -tao_crtol <crtol> - Sets crtol

767:   Notes:
768:   Use PETSC_DEFAULT to leave any tolerance unchanged.
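
   For example, a sketch that sets only the absolute constraint tolerance
   (the value 1.e-8 is purely illustrative):
.vb
      TaoSetConstraintTolerances(tao,1.e-8,PETSC_DEFAULT);
.ve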

770:   Level: intermediate

772: .seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()

774: @*/
775: PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
776: {


782:   if (catol != PETSC_DEFAULT) {
783:     if (catol<0) {
784:       PetscInfo(tao,"Tried to set negative catol -- ignored.\n");
785:     } else {
786:       tao->catol = PetscMax(0,catol);
787:       tao->catol_changed=PETSC_TRUE;
788:     }
789:   }

791:   if (crtol != PETSC_DEFAULT) {
792:     if (crtol<0) {
793:       PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");
794:     } else {
795:       tao->crtol = PetscMax(0,crtol);
796:       tao->crtol_changed=PETSC_TRUE;
797:     }
798:   }
799:   return(0);
800: }

804: /*@
805:   TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO  convergence tests

807:   Not Collective

809:   Input Parameter:
810: . tao - the Tao context

812:   Output Parameters:
813: + catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
814: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

816:   Level: intermediate

818: .seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()

820: @*/
821: PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
822: {
825:   if (catol) *catol = tao->catol;
826:   if (crtol) *crtol = tao->crtol;
827:   return(0);
828: }

832: /*@
833:    TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
834:    When an approximate solution with an objective value below this number
835:    has been found, the solver will terminate.

837:    Logically Collective on Tao

839:    Input Parameters:
840: +  tao - the Tao solver context
841: -  fmin - the tolerance

843:    Options Database Keys:
844: .    -tao_fmin <fmin> - sets the minimum function value

846:    Level: intermediate

848: .seealso: TaoSetTolerances()
849: @*/
850: PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin)
851: {
854:   tao->fmin = fmin;
855:   tao->fmin_changed=PETSC_TRUE;
856:   return(0);
857: }

861: /*@
862:    TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
863:    When an approximate solution with an objective value below this number
864:    has been found, the solver will terminate.

866:    Not collective on Tao

868:    Input Parameters:
869: .  tao - the Tao solver context

871:    Output Parameter:
872: .  fmin - the minimum function value

874:    Level: intermediate

876: .seealso: TaoSetFunctionLowerBound()
877: @*/
878: PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin)
879: {
882:   *fmin = tao->fmin;
883:   return(0);
884: }

888: /*@
889:    TaoSetMaximumFunctionEvaluations - Sets a maximum number of
890:    function evaluations.

892:    Logically Collective on Tao

894:    Input Parameters:
895: +  tao - the Tao solver context
896: -  nfcn - the maximum number of function evaluations (>=0)

898:    Options Database Keys:
899: .    -tao_max_funcs <nfcn> - sets the maximum number of function evaluations

901:    Level: intermediate

903: .seealso: TaoSetTolerances(), TaoSetMaximumIterations()
904: @*/

906: PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn)
907: {
910:   tao->max_funcs = PetscMax(0,nfcn);
911:   tao->max_funcs_changed=PETSC_TRUE;
912:   return(0);
913: }

917: /*@
918:    TaoGetMaximumFunctionEvaluations - Gets the maximum number of
919:    function evaluations.

921:    Not Collective

923:    Input Parameters:
924: .  tao - the Tao solver context

926:    Output Parameters:
927: .  nfcn - the maximum number of function evaluations

929:    Level: intermediate

931: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
932: @*/

934: PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn)
935: {
938:   *nfcn = tao->max_funcs;
939:   return(0);
940: }

944: /*@
945:    TaoGetCurrentFunctionEvaluations - Gets the current number of
946:    function evaluations.

948:    Not Collective

950:    Input Parameters:
951: .  tao - the Tao solver context

953:    Output Parameters:
954: .  nfuncs - the current number of function evaluations

956:    Level: intermediate

958: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
959: @*/

961: PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs)
962: {
965:   *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
966:   return(0);
967: }

971: /*@
972:    TaoSetMaximumIterations - Sets a maximum number of iterates.

974:    Logically Collective on Tao

976:    Input Parameters:
977: +  tao - the Tao solver context
978: -  maxits - the maximum number of iterates (>=0)

980:    Options Database Keys:
981: .    -tao_max_it <its> - sets the maximum number of iterations

983:    Level: intermediate

985: .seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations()
986: @*/
987: PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits)
988: {
991:   tao->max_it = PetscMax(0,maxits);
992:   tao->max_it_changed=PETSC_TRUE;
993:   return(0);
994: }

998: /*@
999:    TaoGetMaximumIterations - Gets the maximum number of iterates.

1001:    Not Collective

1003:    Input Parameters:
1004: .  tao - the Tao solver context

1006:    Output Parameters:
1007: .  maxits - the maximum number of iterates

1009:    Level: intermediate

1011: .seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations()
1012: @*/
1013: PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits)
1014: {
1017:   *maxits = tao->max_it;
1018:   return(0);
1019: }

1023: /*@
1024:    TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.

1026:    Logically collective on Tao

1028:    Input Parameters:
1029: +  tao - a TAO optimization solver
1030: -  radius - the trust region radius

1032:    Level: intermediate

1034:    Options Database Key:
1035: .  -tao_trust0 <t0> - sets initial trust region radius

1037: .seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance()
1038: @*/
1039: PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
1040: {
1043:   tao->trust0 = PetscMax(0.0,radius);
1044:   tao->trust0_changed=PETSC_TRUE;
1045:   return(0);
1046: }

1050: /*@
1051:    TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.

1053:    Not Collective

1055:    Input Parameter:
1056: .  tao - a TAO optimization solver

1058:    Output Parameter:
1059: .  radius - the trust region radius

1061:    Level: intermediate

1063: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius()
1064: @*/
1065: PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius)
1066: {
1069:   *radius = tao->trust0;
1070:   return(0);
1071: }

1075: /*@
1076:    TaoGetCurrentTrustRegionRadius - Gets the current trust region radius.

1078:    Not Collective

1080:    Input Parameter:
1081: .  tao - a TAO optimization solver

1083:    Output Parameter:
1084: .  radius - the trust region radius

1086:    Level: intermediate

1088: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius()
1089: @*/
1090: PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius)
1091: {
1094:   *radius = tao->trust;
1095:   return(0);
1096: }

1100: /*@
1101:   TaoGetTolerances - gets the current values of tolerances

1103:   Not Collective

1105:   Input Parameters:
1106: . tao - the Tao context

1108:   Output Parameters:
1109: + gatol - stop if norm of gradient is less than this
1110: . grtol - stop if relative norm of gradient is less than this
1111: - gttol - stop if norm of gradient is reduced by this factor

1113:   Note: NULL can be used as an argument if not all tolerance values are needed

1115: .seealso: TaoSetTolerances()

1117:   Level: intermediate
1118: @*/
1119: PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol)
1120: {
1123:   if (gatol) *gatol=tao->gatol;
1124:   if (grtol) *grtol=tao->grtol;
1125:   if (gttol) *gttol=tao->gttol;
1126:   return(0);
1127: }

1131: /*@
1132:   TaoGetKSP - Gets the linear solver used by the optimization solver.
1133:   Application writers should use TaoGetKSP if they need direct access
1134:   to the PETSc KSP object.

1136:   Not Collective

1138:    Input Parameters:
1139: .  tao - the TAO solver

1141:    Output Parameters:
1142: .  ksp - the KSP linear solver used in the optimization solver
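
   Notes:
   For example, a sketch of customizing the inner linear solver (the choice of PCJACOBI is only
   illustrative; not every TAO method creates a KSP, so the result may be NULL; error checking omitted):
.vb
      KSP ksp;
      PC  pc;

      TaoGetKSP(tao,&ksp);
      if (ksp) {
        KSPGetPC(ksp,&pc);
        PCSetType(pc,PCJACOBI);
      }
.ve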

1144:    Level: intermediate

1146: @*/
1147: PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp)
1148: {
1150:   *ksp = tao->ksp;
1151:   return(0);
1152: }

1156: /*@
1157:    TaoGetLinearSolveIterations - Gets the total number of linear iterations
1158:    used by the TAO solver

1160:    Not Collective

1162:    Input Parameter:
1163: .  tao - TAO context

1165:    Output Parameter:
1166: .  lits - number of linear iterations

1168:    Notes:
1169:    This counter is reset to zero for each successive call to TaoSolve()

1171:    Level: intermediate

1173: .keywords: TAO

1175: .seealso:  TaoGetKSP()
1176: @*/
1177: PetscErrorCode  TaoGetLinearSolveIterations(Tao tao,PetscInt *lits)
1178: {
1182:   *lits = tao->ksp_tot_its;
1183:   return(0);
1184: }

1188: /*@
1189:   TaoGetLineSearch - Gets the line search used by the optimization solver.
1190:   Application writers should use TaoGetLineSearch if they need direct access
1191:   to the TaoLineSearch object.

1193:   Not Collective

1195:    Input Parameters:
1196: .  tao - the TAO solver

1198:    Output Parameters:
1199: .  ls - the line search used in the optimization solver

1201:    Level: intermediate

1203: @*/
1204: PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls)
1205: {
1207:   *ls = tao->linesearch;
1208:   return(0);
1209: }

1213: /*@
1214:   TaoAddLineSearchCounts - Adds the number of function evaluations spent
1215:   in the line search to the running total.

1217:    Input Parameter:
1218: .  tao - the TAO solver

1221:    Level: developer

1223: .seealso: TaoLineSearchApply()
1224: @*/
1225: PetscErrorCode TaoAddLineSearchCounts(Tao tao)
1226: {
1228:   PetscBool      flg;
1229:   PetscInt       nfeval,ngeval,nfgeval;

1233:   if (tao->linesearch) {
1234:     TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);
1235:     if (!flg) {
1236:       TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);
1237:       tao->nfuncs+=nfeval;
1238:       tao->ngrads+=ngeval;
1239:       tao->nfuncgrads+=nfgeval;
1240:     }
1241:   }
1242:   return(0);
1243: }

1247: /*@
1248:   TaoGetSolutionVector - Returns the vector with the current TAO solution

1250:   Not Collective

1252:   Input Parameter:
1253: . tao - the Tao context

1255:   Output Parameter:
1256: . X - the current solution

1258:   Level: intermediate

1260:   Note:  The returned vector will be the same object that was passed into TaoSetInitialVector()
1261: @*/
1262: PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X)
1263: {
1266:   *X = tao->solution;
1267:   return(0);
1268: }

1272: /*@
1273:   TaoGetGradientVector - Returns the vector with the current TAO gradient

1275:   Not Collective

1277:   Input Parameter:
1278: . tao - the Tao context

1280:   Output Parameter:
1281: . G - the current gradient

1283:   Level: intermediate
1284: @*/
1285: PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G)
1286: {
1289:   *G = tao->gradient;
1290:   return(0);
1291: }

1295: /*@
1296:    TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers.
1297:    These statistics include the iteration number, residual norms, and convergence status.
1298:    This routine gets called before solving each optimization problem.

1300:    Collective on Tao

1302:    Input Parameters:
1303: .  tao - the Tao context

1305:    Level: developer

1307: .seealso: TaoCreate(), TaoSolve()
1308: @*/
1309: PetscErrorCode TaoResetStatistics(Tao tao)
1310: {
1313:   tao->niter        = 0;
1314:   tao->nfuncs       = 0;
1315:   tao->nfuncgrads   = 0;
1316:   tao->ngrads       = 0;
1317:   tao->nhess        = 0;
1318:   tao->njac         = 0;
1319:   tao->nconstraints = 0;
1320:   tao->ksp_its      = 0;
1321:   tao->ksp_tot_its      = 0;
1322:   tao->reason       = TAO_CONTINUE_ITERATING;
1323:   tao->residual     = 0.0;
1324:   tao->cnorm        = 0.0;
1325:   tao->step         = 0.0;
1326:   tao->lsflag       = PETSC_FALSE;
1327:   if (tao->hist_reset) tao->hist_len=0;
1328:   return(0);
1329: }

1333: /*@C
1334:   TaoSetConvergenceTest - Sets the function that is to be used to test
1335:   for convergence of the iterative minimization solution.  The new convergence
1336:   testing routine will replace TAO's default convergence test.

1338:   Logically Collective on Tao

1340:   Input Parameters:
1341: + tao - the Tao object
1342: . conv - the routine to test for convergence
1343: - ctx - [optional] context for private data for the convergence routine
1344:         (may be NULL)

1346:   Calling sequence of conv:
1347: $   PetscErrorCode conv(Tao tao, void *ctx)

1349: + tao - the Tao object
1350: - ctx - [optional] convergence context

1352:   Note: The new convergence testing routine should call TaoSetConvergedReason().
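
   A sketch of a user-defined test follows (MyConvergenceTest is a hypothetical application
   routine and the 1.e-7 threshold is purely illustrative):
.vb
      PetscErrorCode MyConvergenceTest(Tao tao, void *ctx)
      {
        PetscInt           its;
        PetscReal          f,gnorm,cnorm,xdiff;
        TaoConvergedReason reason;

        TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
        if (gnorm < 1.e-7) {
          TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
        } else {
          TaoSetConvergedReason(tao,TAO_CONTINUE_ITERATING);
        }
        return 0;
      }

      TaoSetConvergenceTest(tao,MyConvergenceTest,NULL);
.ve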

1354:   Level: advanced

1356: .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor()

1358: @*/
1359: PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx)
1360: {
1363:   (tao)->ops->convergencetest = conv;
1364:   (tao)->cnvP = ctx;
1365:   return(0);
1366: }

1370: /*@C
1371:    TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every
1372:    iteration of the solver to display the iteration's
1373:    progress.

1375:    Logically Collective on Tao

1377:    Input Parameters:
1378: +  tao - the Tao solver context
1379: .  mymonitor - monitoring routine
1380: -  mctx - [optional] user-defined context for private data for the
1381:           monitor routine (may be NULL)

1383:    Calling sequence of mymonitor:
1384: $     int mymonitor(Tao tao,void *mctx)

1386: +    tao - the Tao solver context
1387: -    mctx - [optional] monitoring context


1390:    Options Database Keys:
1391: +    -tao_monitor        - sets TaoDefaultMonitor()
1392: .    -tao_smonitor       - sets short monitor
1393: .    -tao_cmonitor       - same as smonitor plus constraint norm
1394: .    -tao_view_solution   - view solution at each iteration
1395: .    -tao_view_gradient   - view gradient at each iteration
1396: .    -tao_view_separableobjective - view separable objective function at each iteration
1397: -    -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database.


1400:    Notes:
1401:    Several different monitoring routines may be set by calling
1402:    TaoSetMonitor() multiple times; all will be called in the
1403:    order in which they were set.
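
   A sketch of a user-defined monitor (MyMonitor is a hypothetical application routine;
   error checking omitted):
.vb
      PetscErrorCode MyMonitor(Tao tao, void *mctx)
      {
        PetscInt           its;
        PetscReal          f,gnorm,cnorm,xdiff;
        TaoConvergedReason reason;

        TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
        PetscPrintf(PetscObjectComm((PetscObject)tao),"iter %D: f = %g, ||g|| = %g\n",its,(double)f,(double)gnorm);
        return 0;
      }

      TaoSetMonitor(tao,MyMonitor,NULL,NULL);
.ve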

1405:    Fortran Notes: Only one monitor function may be set

1407:    Level: intermediate

1409: .seealso: TaoDefaultMonitor(), TaoCancelMonitors(),  TaoSetDestroyRoutine()
1410: @*/
1411: PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**))
1412: {
1414:   PetscInt       i;

1418:   if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PETSC_COMM_SELF,1,"Cannot attach another monitor -- max=%d",MAXTAOMONITORS);

1420:   for (i=0; i<tao->numbermonitors;i++) {
1421:     if (func == tao->monitor[i] && dest == tao->monitordestroy[i] && ctx == tao->monitorcontext[i]) {
1422:       if (dest) {
1423:         (*dest)(&ctx);
1424:       }
1425:       return(0);
1426:     }
1427:   }
1428:   tao->monitor[tao->numbermonitors] = func;
1429:   tao->monitorcontext[tao->numbermonitors] = ctx;
1430:   tao->monitordestroy[tao->numbermonitors] = dest;
1431:   ++tao->numbermonitors;
1432:   return(0);
1433: }

1437: /*@
1438:    TaoCancelMonitors - Clears all the monitor functions for a Tao object.

1440:    Logically Collective on Tao

1442:    Input Parameters:
1443: .  tao - the Tao solver context

1445:    Options Database:
1446: .  -tao_cancelmonitors - cancels all monitors that have been hardwired
1447:     into a code by calls to TaoSetMonitor(), but does not cancel those
1448:     set via the options database

1450:    Notes:
1451:    There is no way to clear one specific monitor from a Tao object.

1453:    Level: advanced

1455: .seealso: TaoDefaultMonitor(), TaoSetMonitor()
1456: @*/
1457: PetscErrorCode TaoCancelMonitors(Tao tao)
1458: {
1459:   PetscInt       i;

1464:   for (i=0;i<tao->numbermonitors;i++) {
1465:     if (tao->monitordestroy[i]) {
1466:       (*tao->monitordestroy[i])(&tao->monitorcontext[i]);
1467:     }
1468:   }
1469:   tao->numbermonitors=0;
1470:   return(0);
1471: }

1475: /*@
1476:    TaoDefaultMonitor - Default routine for monitoring progress of the
1477:    Tao solvers.  This monitor prints the function value and gradient
1478:    norm at each iteration.  It can be turned on from the command line using the
1479:    -tao_monitor option

1481:    Collective on Tao

1483:    Input Parameters:
1484: +  tao - the Tao context
1485: -  ctx - PetscViewer context or NULL

1487:    Options Database Keys:
1488: .  -tao_monitor

1490:    Level: advanced

1492: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1493: @*/
1494: PetscErrorCode TaoDefaultMonitor(Tao tao, void *ctx)
1495: {
1497:   PetscInt       its;
1498:   PetscReal      fct,gnorm;
1499:   PetscViewer    viewer = (PetscViewer)ctx;

1503:   its=tao->niter;
1504:   fct=tao->fc;
1505:   gnorm=tao->residual;
1506:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);
1507:   ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1508:   if (gnorm >= PETSC_INFINITY) {
1509:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf \n");
1510:   } else {
1511:     ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g \n",(double)gnorm);
1512:   }
1513:   return(0);
1514: }

1518: /*@
1519:    TaoDefaultSMonitor - Default routine for monitoring progress of the
1520:    solver. Same as TaoDefaultMonitor() except
1521:    it prints fewer digits of the residual as the residual gets smaller.
1522:    This is because the later digits are meaningless and are often
1523:    different on different machines; by using this routine different
1524:    machines will usually generate the same output. It can be turned on
1525:    by using the -tao_smonitor option

1527:    Collective on Tao

1529:    Input Parameters:
1530: +  tao - the Tao context
1531: -  ctx - PetscViewer context of type ASCII

1533:    Options Database Keys:
1534: .  -tao_smonitor

1536:    Level: advanced

1538: .seealso: TaoDefaultMonitor(), TaoSetMonitor()
1539: @*/
1540: PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
1541: {
1543:   PetscInt       its;
1544:   PetscReal      fct,gnorm;
1545:   PetscViewer    viewer = (PetscViewer)ctx;

1549:   its=tao->niter;
1550:   fct=tao->fc;
1551:   gnorm=tao->residual;
1552:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);
1553:   ierr=PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);
1554:   if (gnorm >= PETSC_INFINITY) {
1555:     ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");
1556:   } else if (gnorm > 1.e-6) {
1557:     ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);
1558:   } else if (gnorm > 1.e-11) {
1559:     ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");
1560:   } else {
1561:     ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");
1562:   }
1563:   return(0);
1564: }

1568: /*@
1569:    TaoDefaultCMonitor - same as TaoDefaultMonitor() except
1570:    it prints the norm of the constraints function. It can be turned on
1571:    from the command line using the -tao_cmonitor option

1573:    Collective on Tao

1575:    Input Parameters:
1576: +  tao - the Tao context
1577: -  ctx - PetscViewer context or NULL

1579:    Options Database Keys:
1580: .  -tao_cmonitor

1582:    Level: advanced

1584: .seealso: TaoDefaultMonitor(), TaoSetMonitor()
1585: @*/
1586: PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx)
1587: {
1589:   PetscInt       its;
1590:   PetscReal      fct,gnorm;
1591:   PetscViewer    viewer = (PetscViewer)ctx;

1595:   its=tao->niter;
1596:   fct=tao->fc;
1597:   gnorm=tao->residual;
1598:   ierr=PetscViewerASCIIPrintf(viewer,"iter = %D,",its);
1599:   ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1600:   ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g ",(double)gnorm);
1601:   PetscViewerASCIIPrintf(viewer,"  Constraint: %g \n",(double)tao->cnorm);
1602:   return(0);
1603: }

1607: /*@C
1608:    TaoSolutionMonitor - Views the solution at each iteration
1609:    It can be turned on from the command line using the
1610:    -tao_view_solution option

1612:    Collective on Tao

1614:    Input Parameters:
1615: +  tao - the Tao context
1616: -  ctx - PetscViewer context or NULL

1618:    Options Database Keys:
1619: .  -tao_view_solution

1621:    Level: advanced

1623: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1624: @*/
1625: PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx)
1626: {
1628:   PetscViewer    viewer = (PetscViewer)ctx;

1632:   VecView(tao->solution, viewer);
1633:   return(0);
1634: }

1638: /*@C
1639:    TaoGradientMonitor - Views the gradient at each iteration
1640:    It can be turned on from the command line using the
1641:    -tao_view_gradient option

1643:    Collective on Tao

1645:    Input Parameters:
1646: +  tao - the Tao context
1647: -  ctx - PetscViewer context or NULL

1649:    Options Database Keys:
1650: .  -tao_view_gradient

1652:    Level: advanced

1654: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1655: @*/
1656: PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx)
1657: {
1659:   PetscViewer    viewer = (PetscViewer)ctx;

1663:   VecView(tao->gradient, viewer);
1664:   return(0);
1665: }

1669: /*@C
1670:    TaoStepDirectionMonitor - Views the step direction at each iteration.
1671:    It can be turned on from the command line using the
1672:    -tao_view_stepdirection option

1674:    Collective on Tao

1676:    Input Parameters:
1677: +  tao - the Tao context
1678: -  ctx - PetscViewer context or NULL

1680:    Options Database Keys:
1681: .  -tao_view_stepdirection

1683:    Level: advanced

1685: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1686: @*/
1687: PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx)
1688: {
1690:   PetscViewer    viewer = (PetscViewer)ctx;

1694:   VecView(tao->stepdirection, viewer);
1695:   return(0);
1696: }

1700: /*@C
1701:    TaoDrawSolutionMonitor - Plots the solution at each iteration
1702:    It can be turned on from the command line using the
1703:    -tao_draw_solution option

1705:    Collective on Tao

1707:    Input Parameters:
1708: +  tao - the Tao context
1709: -  ctx - PetscViewer context

1711:    Options Database Keys:
1712: .  -tao_draw_solution

1714:    Level: advanced

1716: .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor
1717: @*/
1718: PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx)
1719: {
1721:   PetscViewer    viewer = (PetscViewer) ctx;

1725:   VecView(tao->solution, viewer);
1726:   return(0);
1727: }

1731: /*@C
1732:    TaoDrawGradientMonitor - Plots the gradient at each iteration
1733:    It can be turned on from the command line using the
1734:    -tao_draw_gradient option

1736:    Collective on Tao

1738:    Input Parameters:
1739: +  tao - the Tao context
1740: -  ctx - PetscViewer context

1742:    Options Database Keys:
1743: .  -tao_draw_gradient

1745:    Level: advanced

1747: .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor
1748: @*/
1749: PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx)
1750: {
1752:   PetscViewer    viewer = (PetscViewer)ctx;

1756:   VecView(tao->gradient, viewer);
1757:   return(0);
1758: }

1762: /*@C
1763:    TaoDrawStepMonitor - Plots the step direction at each iteration
1764:    It can be turned on from the command line using the
1765:    -tao_draw_step option

1767:    Collective on Tao

1769:    Input Parameters:
1770: +  tao - the Tao context
1771: -  ctx - PetscViewer context

1773:    Options Database Keys:
1774: .  -tao_draw_step

1776:    Level: advanced

1778: .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor
1779: @*/
1780: PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx)
1781: {
1783:   PetscViewer    viewer = (PetscViewer)(ctx);

1786:   VecView(tao->stepdirection, viewer);
1787:   return(0);
1788: }

1792: /*@C
1793:    TaoSeparableObjectiveMonitor - Views the separable objective function at each iteration
1794:    It can be turned on from the command line using the
1795:    -tao_view_separableobjective option

1797:    Collective on Tao

1799:    Input Parameters:
1800: +  tao - the Tao context
1801: -  ctx - PetscViewer context or NULL

1803:    Options Database Keys:
1804: .  -tao_view_separableobjective

1806:    Level: advanced

1808: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1809: @*/
1810: PetscErrorCode TaoSeparableObjectiveMonitor(Tao tao, void *ctx)
1811: {
1813:   PetscViewer    viewer  = (PetscViewer)ctx;

1817:   VecView(tao->sep_objective,viewer);
1818:   return(0);
1819: }

1823: /*@
1824:    TaoDefaultConvergenceTest - Determines whether the solver should continue iterating
1825:    or terminate.

1827:    Collective on Tao

1829:    Input Parameters:
1830: +  tao - the Tao context
1831: -  dummy - unused dummy context

1833:    Output Parameter:
1834: .  reason - for terminating

1836:    Notes:
1837:    This routine checks the residual in the optimality conditions, the
1838:    relative residual in the optimality conditions, the number of function
1839:    evaluations, and the function value to test convergence.  Some
1840:    solvers may use different convergence routines.

1842:    Level: developer

1844: .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason()
1845: @*/

1847: PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy)
1848: {
1849:   PetscInt           niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
1850:   PetscInt           max_funcs=tao->max_funcs;
1851:   PetscReal          gnorm=tao->residual, gnorm0=tao->gnorm0;
1852:   PetscReal          f=tao->fc, steptol=tao->steptol,trradius=tao->step;
1853:   PetscReal          gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol;
1854:   PetscReal          catol=tao->catol,crtol=tao->crtol;
1855:   PetscReal          fmin=tao->fmin, cnorm=tao->cnorm;
1856:   TaoConvergedReason reason=tao->reason;
1857:   PetscErrorCode     ierr;

1861:   if (reason != TAO_CONTINUE_ITERATING) {
1862:     return(0);
1863:   }

1865:   if (PetscIsInfOrNanReal(f)) {
1866:     PetscInfo(tao,"Failed to converged, function value is Inf or NaN\n");
1867:     reason = TAO_DIVERGED_NAN;
1868:   } else if (f <= fmin && cnorm <=catol) {
1869:     PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);
1870:     reason = TAO_CONVERGED_MINF;
1871:   } else if (gnorm<= gatol && cnorm <=catol) {
1872:     PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);
1873:     reason = TAO_CONVERGED_GATOL;
1874:   } else if ( f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) {
1875:     PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);
1876:     reason = TAO_CONVERGED_GRTOL;
1877:   } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) {
1878:     PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);
1879:     reason = TAO_CONVERGED_GTTOL;
1880:   } else if (nfuncs > max_funcs){
1881:     PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);
1882:     reason = TAO_DIVERGED_MAXFCN;
1883:   } else if ( tao->lsflag != 0 ){
1884:     PetscInfo(tao,"Tao Line Search failure.\n");
1885:     reason = TAO_DIVERGED_LS_FAILURE;
1886:   } else if (trradius < steptol && niter > 0){
1887:     PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);
1888:     reason = TAO_CONVERGED_STEPTOL;
1889:   } else if (niter > tao->max_it) {
1890:     PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);
1891:     reason = TAO_DIVERGED_MAXITS;
1892:   } else {
1893:     reason = TAO_CONTINUE_ITERATING;
1894:   }
1895:   tao->reason = reason;
1896:   return(0);
1897: }
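
For illustration only, a sketch of replacing this default test with a user-defined one via TaoSetConvergenceTest(); the routine name MyConvergenceTest and the tolerance 1.e-8 are placeholders:

   /* a user-defined convergence test: stop once the gradient norm drops below a fixed value */
   PetscErrorCode MyConvergenceTest(Tao tao, void *ctx)
   {
     PetscReal gnorm;

     TaoGetSolutionStatus(tao,NULL,NULL,&gnorm,NULL,NULL,NULL);
     if (gnorm < 1.e-8) TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
     return 0;
   }

   /* installed in place of TaoDefaultConvergenceTest() */
   TaoSetConvergenceTest(tao,MyConvergenceTest,NULL);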

1901: /*@C
1902:    TaoSetOptionsPrefix - Sets the prefix used for searching for all
1903:    TAO options in the database.


1906:    Logically Collective on Tao

1908:    Input Parameters:
1909: +  tao - the Tao context
1910: -  prefix - the prefix string to prepend to all TAO option requests

1912:    Notes:
1913:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1914:    The first character of all runtime options is AUTOMATICALLY the hyphen.

1916:    For example, to distinguish between the runtime options for two
1917:    different TAO solvers, one could call
1918: .vb
1919:       TaoSetOptionsPrefix(tao1,"sys1_")
1920:       TaoSetOptionsPrefix(tao2,"sys2_")
1921: .ve

1923:    This would enable use of different options for each system, such as
1924: .vb
1925:       -sys1_tao_type blmvm -sys1_tao_gatol 1.e-3
1926:       -sys2_tao_type lmvm  -sys2_tao_gatol 1.e-4
1927: .ve


1930:    Level: advanced

1932: .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix()
1933: @*/

1935: PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[])
1936: {

1940:   PetscObjectSetOptionsPrefix((PetscObject)tao,p);
1941:   if (tao->linesearch) {
1942:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
1943:   }
1944:   if (tao->ksp) {
1945:     KSPSetOptionsPrefix(tao->ksp,p);
1946:   }
1947:   return(0);
1948: }

1952: /*@C
1953:    TaoAppendOptionsPrefix - Appends to the prefix used for searching for all
1954:    TAO options in the database.


1957:    Logically Collective on Tao

1959:    Input Parameters:
1960: +  tao - the Tao solver context
1961: -  prefix - the prefix string to prepend to all TAO option requests

1963:    Notes:
1964:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1965:    The first character of all runtime options is AUTOMATICALLY the hyphen.


1968:    Level: advanced

1970: .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix()
1971: @*/
1972: PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[])
1973: {

1977:   PetscObjectAppendOptionsPrefix((PetscObject)tao,p);
1978:   if (tao->linesearch) {
1979:     TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
1980:   }
1981:   if (tao->ksp) {
1982:     KSPSetOptionsPrefix(tao->ksp,p);
1983:   }
1984:   return(0);
1985: }

1989: /*@C
1990:   TaoGetOptionsPrefix - Gets the prefix used for searching for all
1991:   TAO options in the database

1993:   Not Collective

1995:   Input Parameter:
1996: . tao - the Tao context

1998:   Output Parameter:
1999: . prefix - pointer to the prefix string used

2001:   Notes: On the Fortran side, the user should pass in a string 'prefix' of
2002:   sufficient length to hold the prefix.

2004:   Level: advanced

2006: .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix()
2007: @*/
2008: PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[])
2009: {
2010:    return PetscObjectGetOptionsPrefix((PetscObject)tao,p);
2011: }

2015: /*@C
2016:    TaoSetType - Sets the method used by the Tao solver.

2018:    Collective on Tao

2020:    Input Parameters:
2021: +  tao - the Tao solver context
2022: -  type - a known method

2024:    Options Database Key:
2025: .  -tao_type <type> - Sets the method; use -help for a list
2026:    of available methods (for instance, "-tao_type lmvm" or "-tao_type tron")

2028:    Available methods include:
2029: +    nls - Newton's method with line search for unconstrained minimization
2030: .    ntr - Newton's method with trust region for unconstrained minimization
2031: .    ntl - Newton's method with trust region, line search for unconstrained minimization
2032: .    lmvm - Limited memory variable metric method for unconstrained minimization
2033: .    cg - Nonlinear conjugate gradient method for unconstrained minimization
2034: .    nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
2035: .    tron - Newton Trust Region method for bound constrained minimization
2036: .    gpcg - Newton Trust Region method for quadratic bound constrained minimization
2037: .    blmvm - Limited memory variable metric method for bound constrained minimization
2038: -    pounders - Model-based algorithm (POUNDerS) for nonlinear least squares

2040:   Level: intermediate

2042: .seealso: TaoCreate(), TaoGetType(), TaoType

2044: @*/
2045: PetscErrorCode TaoSetType(Tao tao, const TaoType type)
2046: {
2048:   PetscErrorCode (*create_xxx)(Tao);
2049:   PetscBool      issame;


2054:   PetscObjectTypeCompare((PetscObject)tao,type,&issame);
2055:   if (issame) return(0);

2057:   PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);
2058:   if (!create_xxx) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type);

2060:   /* Destroy the existing solver information */
2061:   if (tao->ops->destroy) {
2062:     (*tao->ops->destroy)(tao);
2063:   }
2064:   KSPDestroy(&tao->ksp);
2065:   TaoLineSearchDestroy(&tao->linesearch);
2066:   VecDestroy(&tao->gradient);
2067:   VecDestroy(&tao->stepdirection);

2069:   tao->ops->setup = 0;
2070:   tao->ops->solve = 0;
2071:   tao->ops->view  = 0;
2072:   tao->ops->setfromoptions = 0;
2073:   tao->ops->destroy = 0;

2075:   tao->setupcalled = PETSC_FALSE;

2077:   (*create_xxx)(tao);
2078:   PetscObjectChangeTypeName((PetscObject)tao,type);
2079:   return(0);
2080: }
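
For illustration only, a minimal driver sketch showing how TaoSetType() interacts with TaoSetFromOptions(); the choice of "lmvm" is arbitrary, and the objective/gradient setup and error checking are omitted:

   Tao tao;

   TaoCreate(PETSC_COMM_WORLD,&tao);
   TaoSetType(tao,"lmvm");        /* programmatic default */
   /* ... set the initial vector and objective/gradient callbacks here ... */
   TaoSetFromOptions(tao);        /* lets -tao_type <type> override the choice above */
   TaoSolve(tao);
   TaoDestroy(&tao);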

2084: /*MC
2085:    TaoRegister - Adds a method to the TAO solver package.

2087:    Synopsis:
2088:    PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))

2090:    Not collective

2092:    Input Parameters:
2093: +  sname - name of a new user-defined solver
2094: -  func - routine to create the method context

2096:    Notes:
2097:    TaoRegister() may be called multiple times to add several user-defined solvers.

2099:    Sample usage:
2100: .vb
2101:    TaoRegister("my_solver",MySolverCreate);
2102: .ve

2104:    Then, your solver can be chosen with the procedural interface via
2105: $     TaoSetType(tao,"my_solver")
2106:    or at runtime via the option
2107: $     -tao_type my_solver

2109:    Level: advanced

2111: .seealso: TaoRegisterAll(), TaoRegisterDestroy()
2112: M*/
2113: PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2114: {

2118:   PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);
2119:   return(0);
2120: }
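
For illustration only, a sketch extending the sample usage above with the creation routine itself; TaoCreate_MySolver and MySolverSolve are hypothetical, and a real plugin would include the private header petsc/private/taoimpl.h to reach tao->ops:

   #include <petsc/private/taoimpl.h>   /* needed for access to tao->ops */

   static PetscErrorCode MySolverSolve(Tao tao)
   {
     /* ... iterate, calling TaoMonitor() once per sweep ... */
     return 0;
   }

   static PetscErrorCode TaoCreate_MySolver(Tao tao)
   {
     tao->ops->solve = MySolverSolve;
     return 0;
   }

   /* registration and selection */
   TaoRegister("my_solver",TaoCreate_MySolver);
   TaoSetType(tao,"my_solver");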

2124: /*@C
2125:    TaoRegisterDestroy - Frees the list of minimization solvers that were
2126:    registered by TaoRegister().

2128:    Not Collective

2130:    Level: advanced

2132: .seealso: TaoRegisterAll(), TaoRegister()
2133: @*/
2134: PetscErrorCode TaoRegisterDestroy(void)
2135: {
2138:   PetscFunctionListDestroy(&TaoList);
2139:   TaoRegisterAllCalled = PETSC_FALSE;
2140:   return(0);
2141: }

2145: /*@
2146:    TaoGetIterationNumber - Gets the number of Tao iterations completed
2147:    at this time.

2149:    Not Collective

2151:    Input Parameter:
2152: .  tao - Tao context

2154:    Output Parameter:
2155: .  iter - iteration number

2157:    Notes:
2158:    For example, during the computation of iteration 2 this would return 1.


2161:    Level: intermediate

2163: .keywords: Tao, nonlinear, get, iteration, number,

2165: .seealso:   TaoGetLinearSolveIterations()
2166: @*/
2167: PetscErrorCode  TaoGetIterationNumber(Tao tao,PetscInt *iter)
2168: {
2172:   *iter = tao->niter;
2173:   return(0);
2174: }

2178: /*@
2179:    TaoSetIterationNumber - Sets the current iteration number.

2181:    Not Collective

2183:    Input Parameters:
2184: +  tao - Tao context
2185: -  iter - iteration number

2187:    Level: developer

2189: .keywords: Tao, nonlinear, set, iteration, number,

2191: .seealso:   TaoGetLinearSolveIterations()
2192: @*/
2193: PetscErrorCode  TaoSetIterationNumber(Tao tao,PetscInt iter)
2194: {

2199:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2200:   tao->niter = iter;
2201:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2202:   return(0);
2203: }

2207: /*@
2208:    TaoGetTotalIterationNumber - Gets the total number of Tao iterations
2209:    completed. This number keeps accumulating if multiple solves
2210:    are called with the Tao object.

2212:    Not Collective

2214:    Input Parameter:
2215: .  tao - Tao context

2217:    Output Parameter:
2218: .  iter - iteration number

2220:    Notes:
2221:    The total iteration count is updated after each solve; if a TaoSolve() is
2222:    currently in progress, those iterations are not yet counted.

2224:    Level: intermediate

2226: .keywords: Tao, nonlinear, get, iteration, number,

2228: .seealso:   TaoGetLinearSolveIterations()
2229: @*/
2230: PetscErrorCode  TaoGetTotalIterationNumber(Tao tao,PetscInt *iter)
2231: {
2235:   *iter = tao->ntotalits;
2236:   return(0);
2237: }

2241: /*@
2242:    TaoSetTotalIterationNumber - Sets the current total iteration number.

2244:    Not Collective

2246:    Input Parameters:
2247: +  tao - Tao context
2248: -  iter - iteration number

2250:    Level: developer

2252: .keywords: Tao, nonlinear, set, iteration, number,

2254: .seealso:   TaoGetLinearSolveIterations()
2255: @*/
2256: PetscErrorCode  TaoSetTotalIterationNumber(Tao tao,PetscInt iter)
2257: {

2262:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2263:   tao->ntotalits = iter;
2264:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2265:   return(0);
2266: }

2270: /*@
2271:   TaoSetConvergedReason - Sets the termination flag on a Tao object

2273:   Logically Collective on Tao

2275:   Input Parameters:
2276: + tao - the Tao context
2277: - reason - one of
2278: $     TAO_CONVERGED_GATOL (3),
2279: $     TAO_CONVERGED_GRTOL (4),
2280: $     TAO_CONVERGED_GTTOL (5),
2281: $     TAO_CONVERGED_STEPTOL (6),
2282: $     TAO_CONVERGED_MINF (7),
2283: $     TAO_CONVERGED_USER (8),
2284: $     TAO_DIVERGED_MAXITS (-2),
2285: $     TAO_DIVERGED_NAN (-4),
2286: $     TAO_DIVERGED_MAXFCN (-5),
2287: $     TAO_DIVERGED_LS_FAILURE (-6),
2288: $     TAO_DIVERGED_TR_REDUCTION (-7),
2289: $     TAO_DIVERGED_USER (-8),
2290: $     TAO_CONTINUE_ITERATING (0)

2291:    Level: intermediate

2293: @*/
2294: PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason)
2295: {
2298:   tao->reason = reason;
2299:   return(0);
2300: }

2304: /*@
2305:    TaoGetConvergedReason - Gets the reason the Tao iteration was stopped.

2307:    Not Collective

2309:    Input Parameter:
2310: .  tao - the Tao solver context

2312:    Output Parameter:
2313: .  reason - one of
2314: $  TAO_CONVERGED_GATOL (3)           ||g(X)|| < gatol
2315: $  TAO_CONVERGED_GRTOL (4)           ||g(X)|| / f(X)  < grtol
2316: $  TAO_CONVERGED_GTTOL (5)           ||g(X)|| / ||g(X0)|| < gttol
2317: $  TAO_CONVERGED_STEPTOL (6)         step size small
2318: $  TAO_CONVERGED_MINF (7)            F < F_min
2319: $  TAO_CONVERGED_USER (8)            User defined
2320: $  TAO_DIVERGED_MAXITS (-2)          its > maxits
2321: $  TAO_DIVERGED_NAN (-4)             Numerical problems
2322: $  TAO_DIVERGED_MAXFCN (-5)          fevals > max_funcs
2323: $  TAO_DIVERGED_LS_FAILURE (-6)      line search failure
2324: $  TAO_DIVERGED_TR_REDUCTION (-7)    trust region failure
2325: $  TAO_DIVERGED_USER (-8)            User defined
2326: $  TAO_CONTINUE_ITERATING (0)

2328:    where
2329: +  X - current solution
2330: .  X0 - initial guess
2331: .  f(X) - current function value
2332: .  f(X*) - true solution (estimated)
2333: .  g(X) - current gradient
2334: .  its - current iterate number
2335: .  maxits - maximum number of iterates
2336: .  fevals - number of function evaluations
2337: -  max_funcs - maximum number of function evaluations

2339:    Level: intermediate

2341: .seealso: TaoSetConvergenceTest(), TaoSetTolerances()

2343: @*/
2344: PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason)
2345: {
2349:   *reason = tao->reason;
2350:   return(0);
2351: }
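
For illustration only, the typical pattern for inspecting the reason after a solve; negative values indicate divergence:

   TaoConvergedReason reason;

   TaoSolve(tao);
   TaoGetConvergedReason(tao,&reason);
   if (reason < 0) {
     PetscPrintf(PETSC_COMM_WORLD,"TAO did not converge (reason = %d)\n",(int)reason);
   }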

2355: /*@
2356:   TaoGetSolutionStatus - Get the current iterate number, objective value,
2357:   residual, infeasibility, and termination reason

2359:   Not Collective

2361:    Input Parameter:
2362: .  tao - the Tao context

2364:    Output Parameters:
2365: +  its - the current iterate number (>=0)
2366: .  f - the current function value
2367: .  gnorm - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.
2368: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2369: .  xdiff - the step length or trust region radius of the most recent iterate.
2370: -  reason - The termination reason, which can equal TAO_CONTINUE_ITERATING

2372:    Level: intermediate

2374:    Notes:
2375:    TAO returns the values set by the solvers in the routine TaoMonitor().

2377:    If any of the output arguments are set to NULL, no corresponding value will be returned.

2380: .seealso: TaoMonitor(), TaoGetConvergedReason()
2381: @*/
2382: PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason)
2383: {
2385:   if (its) *its=tao->niter;
2386:   if (f) *f=tao->fc;
2387:   if (gnorm) *gnorm=tao->residual;
2388:   if (cnorm) *cnorm=tao->cnorm;
2389:   if (reason) *reason=tao->reason;
2390:   if (xdiff) *xdiff=tao->step;
2391:   return(0);
2392: }
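
For illustration only, a sketch of a user monitor built on TaoGetSolutionStatus(); the name MyMonitor is a placeholder, and the routine is attached with TaoSetMonitor():

   static PetscErrorCode MyMonitor(Tao tao, void *ctx)
   {
     PetscInt           its;
     PetscReal          f,gnorm,cnorm,xdiff;
     TaoConvergedReason reason;

     TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);
     PetscPrintf(PETSC_COMM_WORLD,"iter %D: f=%g  residual=%g  infeasibility=%g\n",its,(double)f,(double)gnorm,(double)cnorm);
     return 0;
   }

   /* attach the monitor; no context and no destroy routine are needed here */
   TaoSetMonitor(tao,MyMonitor,NULL,NULL);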

2396: /*@C
2397:    TaoGetType - Gets the current Tao algorithm.

2399:    Not Collective

2401:    Input Parameter:
2402: .  tao - the Tao solver context

2404:    Output Parameter:
2405: .  type - Tao method

2407:    Level: intermediate

2409: @*/
2410: PetscErrorCode TaoGetType(Tao tao, const TaoType *type)
2411: {
2415:   *type=((PetscObject)tao)->type_name;
2416:   return(0);
2417: }

2421: /*@C
2422:   TaoMonitor - Monitor the solver and the current solution.  This
2423:   routine records the iteration number and residual statistics,
2424:   calls any monitors specified by the user, and calls the convergence test routine.

2426:    Input Parameters:
2427: +  tao - the Tao context
2428: .  its - the current iterate number (>=0)
2429: .  f - the current objective function value
2430: .  res - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.  This measure will be recorded and
2431:           used for some termination tests.
2432: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2433: -  steplength - multiple of the step direction added to the previous iterate.

2435:    Output Parameters:
2436: .  reason - The termination reason, which can equal TAO_CONTINUE_ITERATING

2438:    Options Database Key:
2439: .  -tao_monitor - Use the default monitor, which prints statistics to standard output

2441: .seealso: TaoGetConvergedReason(), TaoDefaultMonitor(), TaoSetMonitor()

2443:    Level: developer

2445: @*/
2446: PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength, TaoConvergedReason *reason)
2447: {
2449:   PetscInt       i;

2453:   tao->fc = f;
2454:   tao->residual = res;
2455:   tao->cnorm = cnorm;
2456:   tao->step = steplength;
2457:   if (!its) {
2458:     tao->cnorm0 = cnorm; tao->gnorm0 = res;
2459:   }
2460:   TaoLogConvergenceHistory(tao,f,res,cnorm,tao->ksp_its);
2461:   if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
2462:   if (tao->ops->convergencetest) {
2463:     (*tao->ops->convergencetest)(tao,tao->cnvP);
2464:   }
2465:   for (i=0;i<tao->numbermonitors;i++) {
2466:     (*tao->monitor[i])(tao,tao->monitorcontext[i]);
2467:   }
2468:   *reason = tao->reason;
2469:   return(0);
2470: }
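
For illustration only, a developer-level sketch of how a solver implementation might drive this routine from its iteration loop; f, gnorm, and steplength stand for quantities the algorithm has just computed, and access to tao->niter assumes the private header petsc/private/taoimpl.h:

   PetscReal          f = 0.0, gnorm = 0.0, steplength = 0.0;   /* filled in by the algorithm */
   TaoConvergedReason reason = TAO_CONTINUE_ITERATING;

   while (reason == TAO_CONTINUE_ITERATING) {
     /* ... evaluate the objective, update f, gnorm, steplength, and take the step ... */
     tao->niter++;
     TaoMonitor(tao,tao->niter,f,gnorm,0.0,steplength,&reason);
   }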

2474: /*@
2475:    TaoSetConvergenceHistory - Sets the array used to hold the convergence history.

2477:    Logically Collective on Tao

2479:    Input Parameters:
2480: +  tao - the Tao solver context
2481: .  obj   - array to hold objective value history
2482: .  resid - array to hold residual history
2483: .  cnorm - array to hold constraint violation history
2484: .  lits - integer array holds the number of linear iterations for each Tao iteration
2485: .  na  - size of obj, resid, and cnorm
2486: -  reset - PETSC_TRUE indicates each new minimization resets the history counter to zero;
2487:            otherwise new values for subsequent minimizations are appended after the old ones

2489:    Notes:
2490:    If set, TAO will fill the given arrays with the indicated
2491:    information at each iteration.  If 'obj','resid','cnorm','lits' are
2492:    *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or
2493:    PETSC_DEFAULT) is allocated for the history.
2494:    If not all are NULL, then only the non-NULL information categories
2495:    will be stored; the others will be ignored.

2497:    Any convergence information after iteration number 'na' will not be stored.

2499:    This routine is useful, e.g., when running a code for purposes
2500:    of accurate performance monitoring, when no I/O should be done
2501:    during the section of code that is being timed.

2503:    Level: intermediate

2505: .seealso: TaoGetConvergenceHistory()

2507: @*/
2508: PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal *obj, PetscReal *resid, PetscReal *cnorm, PetscInt *lits, PetscInt na,PetscBool reset)
2509: {


2519:   if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000;
2520:   if (!obj && !resid && !cnorm && !lits) {
2521:     PetscCalloc1(na,&obj);
2522:     PetscCalloc1(na,&resid);
2523:     PetscCalloc1(na,&cnorm);
2524:     PetscCalloc1(na,&lits);
2525:     tao->hist_malloc=PETSC_TRUE;
2526:   }

2528:   tao->hist_obj = obj;
2529:   tao->hist_resid = resid;
2530:   tao->hist_cnorm = cnorm;
2531:   tao->hist_lits = lits;
2532:   tao->hist_max   = na;
2533:   tao->hist_reset = reset;
2534:   tao->hist_len = 0;
2535:   return(0);
2536: }

2540: /*@C
2541:    TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history.

2543:    Collective on Tao

2545:    Input Parameter:
2546: .  tao - the Tao context

2548:    Output Parameters:
2549: +  obj   - array used to hold objective value history
2550: .  resid - array used to hold residual history
2551: .  cnorm - array used to hold constraint violation history
2552: .  lits  - integer array used to hold linear solver iteration count
2553: -  nhist  - size of obj, resid, cnorm, and lits (will be less than or equal to na given in TaoSetConvergenceHistory())

2555:    Notes:
2556:     This routine must be preceded by calls to TaoSetConvergenceHistory()
2557:     and TaoSolve(), otherwise it returns useless information.

2559:     The calling sequence for this routine in Fortran is
2560: $   call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr)

2562:    This routine is useful, e.g., when running a code for purposes
2563:    of accurate performance monitoring, when no I/O should be done
2564:    during the section of code that is being timed.

2566:    Level: advanced

2568: .seealso: TaoSetConvergenceHistory()

2570: @*/
2571: PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist)
2572: {
2574:   if (obj)   *obj   = tao->hist_obj;
2575:   if (cnorm) *cnorm = tao->hist_cnorm;
2576:   if (resid) *resid = tao->hist_resid;
2577:   if (lits)  *lits  = tao->hist_lits;
2578:   if (nhist) *nhist = tao->hist_len;
2579:   return(0);
2580: }
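
For illustration only, a sketch combining TaoSetConvergenceHistory() and TaoGetConvergenceHistory(); the array length 100 is arbitrary:

   PetscReal obj[100],resid[100],cnorm[100];
   PetscInt  lits[100],nhist,i;

   TaoSetConvergenceHistory(tao,obj,resid,cnorm,lits,100,PETSC_TRUE);
   TaoSolve(tao);
   TaoGetConvergenceHistory(tao,NULL,NULL,NULL,NULL,&nhist);
   for (i=0;i<nhist;i++) {
     PetscPrintf(PETSC_COMM_WORLD,"%D: f=%g  ||g||=%g\n",i,(double)obj[i],(double)resid[i]);
   }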

2584: /*@
2585:    TaoSetApplicationContext - Sets the optional user-defined context for
2586:    a solver.

2588:    Logically Collective on Tao

2590:    Input Parameters:
2591: +  tao  - the Tao context
2592: -  usrP - optional user context

2594:    Level: intermediate

2596: .seealso: TaoGetApplicationContext()
2597: @*/
2598: PetscErrorCode  TaoSetApplicationContext(Tao tao,void *usrP)
2599: {
2602:   tao->user = usrP;
2603:   return(0);
2604: }

2608: /*@
2609:    TaoGetApplicationContext - Gets the user-defined context for a
2610:    TAO solver.

2612:    Not Collective

2614:    Input Parameter:
2615: .  tao  - Tao context

2617:    Output Parameter:
2618: .  usrP - user context

2620:    Level: intermediate

2622: .seealso: TaoSetApplicationContext()
2623: @*/
2624: PetscErrorCode  TaoGetApplicationContext(Tao tao,void *usrP)
2625: {
2628:   *(void**)usrP = tao->user;
2629:   return(0);
2630: }
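
For illustration only, a sketch of passing a user-defined struct through the application context; AppCtx and its field alpha are hypothetical:

   typedef struct {
     PetscReal alpha;     /* hypothetical problem parameter */
   } AppCtx;

   AppCtx user;
   user.alpha = 99.0;
   TaoSetApplicationContext(tao,&user);

   /* later, e.g. inside an objective or monitor callback */
   AppCtx *ctx;
   TaoGetApplicationContext(tao,&ctx);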

2634: /*@
2635:    TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient.

2637:    Collective on tao

2639:    Input Parameters:
2640: +  tao  - the Tao context
2641: -  M    - the matrix defining the gradient norm

2643:    Level: beginner

2645: .seealso: TaoGetGradientNorm(), TaoGradientNorm()
2646: @*/
2647: PetscErrorCode  TaoSetGradientNorm(Tao tao, Mat M)
2648: {


2654:   if (tao->gradient_norm) {
2655:     PetscObjectDereference((PetscObject)tao->gradient_norm);
2656:     VecDestroy(&tao->gradient_norm_tmp);
2657:   }

2659:   PetscObjectReference((PetscObject)M);
2660:   tao->gradient_norm = M;
2661:   MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);
2662:   return(0);
2663: }
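
For illustration only, a sketch of supplying a matrix-weighted inner product and using it through TaoGradientNorm(); M and g stand for a previously assembled matrix and a compatible gradient vector:

   Mat       M;        /* e.g. a mass matrix, assembled elsewhere */
   Vec       g;        /* a gradient vector compatible with M */
   PetscReal gnorm;

   TaoSetGradientNorm(tao,M);
   /* measures sqrt(g^T M g) instead of the plain 2-norm */
   TaoGradientNorm(tao,g,NORM_2,&gnorm);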

2667: /*@
2668:    TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient.

2670:    Not Collective

2672:    Input Parameter:
2673: .  tao  - Tao context

2675:    Output Parameter:
2676: .  M - the matrix defining the gradient norm

2678:    Level: beginner

2680: .seealso: TaoSetGradientNorm(), TaoGradientNorm()
2681: @*/
2682: PetscErrorCode  TaoGetGradientNorm(Tao tao, Mat *M)
2683: {
2686:   *M = tao->gradient_norm;
2687:   return(0);
2688: }

2692: /*@C
2693:    TaoGradientNorm - Compute the norm with respect to the inner product the user has set.

2695:    Collective on tao

2697:    Input Parameters:
2698: +  tao      - the Tao context
2699: .  gradient - the gradient whose norm is to be computed
2700: -  type     - the norm type

2702:    Output Parameter:
2703: .  gnorm    - the gradient norm

2705:    Level: developer

2707: .seealso: TaoSetGradientNorm(), TaoGetGradientNorm()
2708: @*/
2709: PetscErrorCode  TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
2710: {


2716:   if (tao->gradient_norm) {
2717:     PetscScalar gnorms;

2719:     if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONGSTATE, "Norm type must be NORM_2 if an inner product for the gradient norm is set.");
2720:     MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);
2721:     VecDot(gradient, tao->gradient_norm_tmp, &gnorms);
2722:     *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
2723:   } else {
2724:     VecNorm(gradient, type, gnorm);
2725:   }
2726:   return(0);
2727: }