Actual source code: taosolver.c
petsc-3.9.4 2018-09-11
1: #define TAO_DLL
3: #include <petsc/private/taoimpl.h>
5: PetscBool TaoRegisterAllCalled = PETSC_FALSE;
6: PetscFunctionList TaoList = NULL;
8: PetscClassId TAO_CLASSID;
9: PetscLogEvent Tao_Solve, Tao_ObjectiveEval, Tao_GradientEval, Tao_ObjGradientEval, Tao_HessianEval, Tao_ConstraintsEval, Tao_JacobianEval;
11: const char *TaoSubSetTypes[] = { "subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",0};
13: struct _n_TaoMonitorDrawCtx {
14: PetscViewer viewer;
15: PetscInt howoften; /* when > 0 uses iteration % howoften, when negative only final solution plotted */
16: };
18: /*@
19: TaoCreate - Creates a TAO solver
21: Collective on MPI_Comm
23: Input Parameter:
24: . comm - MPI communicator
26: Output Parameter:
27: . newtao - the new Tao context
29: Available methods include:
30: + nls - Newton's method with line search for unconstrained minimization
31: . ntr - Newton's method with trust region for unconstrained minimization
32: . ntl - Newton's method with trust region, line search for unconstrained minimization
33: . lmvm - Limited memory variable metric method for unconstrained minimization
34: . cg - Nonlinear conjugate gradient method for unconstrained minimization
35: . nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
36: . tron - Newton Trust Region method for bound constrained minimization
37: . gpcg - Newton Trust Region method for quadratic bound constrained minimization
38: . blmvm - Limited memory variable metric method for bound constrained minimization
39: . lcl - Linearly constrained Lagrangian method for PDE-constrained minimization
40: - pounders - Model-based algorithm for nonlinear least squares
42: Options Database Keys:
43: . -tao_type - select which method TAO should use
45: Level: beginner
47: .seealso: TaoSolve(), TaoDestroy()
48: @*/
49: PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
50: {
52: Tao tao;
56: *newtao = NULL;
58: TaoInitializePackage();
59: TaoLineSearchInitializePackage();
61: PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);
62: tao->ops->computeobjective=0;
63: tao->ops->computeobjectiveandgradient=0;
64: tao->ops->computegradient=0;
65: tao->ops->computehessian=0;
66: tao->ops->computeseparableobjective=0;
67: tao->ops->computeconstraints=0;
68: tao->ops->computejacobian=0;
69: tao->ops->computejacobianequality=0;
70: tao->ops->computejacobianinequality=0;
71: tao->ops->computeequalityconstraints=0;
72: tao->ops->computeinequalityconstraints=0;
73: tao->ops->convergencetest=TaoDefaultConvergenceTest;
74: tao->ops->convergencedestroy=0;
75: tao->ops->computedual=0;
76: tao->ops->setup=0;
77: tao->ops->solve=0;
78: tao->ops->view=0;
79: tao->ops->setfromoptions=0;
80: tao->ops->destroy=0;
82: tao->solution=NULL;
83: tao->gradient=NULL;
84: tao->sep_objective = NULL;
85: tao->constraints=NULL;
86: tao->constraints_equality=NULL;
87: tao->constraints_inequality=NULL;
88: tao->sep_weights_v=NULL;
89: tao->sep_weights_w=NULL;
90: tao->stepdirection=NULL;
91: tao->niter=0;
92: tao->ntotalits=0;
93: tao->XL = NULL;
94: tao->XU = NULL;
95: tao->IL = NULL;
96: tao->IU = NULL;
97: tao->DI = NULL;
98: tao->DE = NULL;
99: tao->gradient_norm = NULL;
100: tao->gradient_norm_tmp = NULL;
101: tao->hessian = NULL;
102: tao->hessian_pre = NULL;
103: tao->jacobian = NULL;
104: tao->jacobian_pre = NULL;
105: tao->jacobian_state = NULL;
106: tao->jacobian_state_pre = NULL;
107: tao->jacobian_state_inv = NULL;
108: tao->jacobian_design = NULL;
109: tao->jacobian_design_pre = NULL;
110: tao->jacobian_equality = NULL;
111: tao->jacobian_equality_pre = NULL;
112: tao->jacobian_inequality = NULL;
113: tao->jacobian_inequality_pre = NULL;
114: tao->state_is = NULL;
115: tao->design_is = NULL;
117: tao->max_it = 10000;
118: tao->max_funcs = 10000;
119: #if defined(PETSC_USE_REAL_SINGLE)
120: tao->gatol = 1e-5;
121: tao->grtol = 1e-5;
122: #else
123: tao->gatol = 1e-8;
124: tao->grtol = 1e-8;
125: #endif
126: tao->crtol = 0.0;
127: tao->catol = 0.0;
128: tao->gttol = 0.0;
129: tao->steptol = 0.0;
130: tao->trust0 = PETSC_INFINITY;
131: tao->fmin = PETSC_NINFINITY;
132: tao->hist_malloc = PETSC_FALSE;
133: tao->hist_reset = PETSC_TRUE;
134: tao->hist_max = 0;
135: tao->hist_len = 0;
136: tao->hist_obj = NULL;
137: tao->hist_resid = NULL;
138: tao->hist_cnorm = NULL;
139: tao->hist_lits = NULL;
141: tao->numbermonitors=0;
142: tao->viewsolution=PETSC_FALSE;
143: tao->viewhessian=PETSC_FALSE;
144: tao->viewgradient=PETSC_FALSE;
145: tao->viewjacobian=PETSC_FALSE;
146: tao->viewconstraints = PETSC_FALSE;
148: /* These flags prevent algorithms from overriding user options */
149: tao->max_it_changed =PETSC_FALSE;
150: tao->max_funcs_changed=PETSC_FALSE;
151: tao->gatol_changed =PETSC_FALSE;
152: tao->grtol_changed =PETSC_FALSE;
153: tao->gttol_changed =PETSC_FALSE;
154: tao->steptol_changed =PETSC_FALSE;
155: tao->trust0_changed =PETSC_FALSE;
156: tao->fmin_changed =PETSC_FALSE;
157: tao->catol_changed =PETSC_FALSE;
158: tao->crtol_changed =PETSC_FALSE;
159: TaoResetStatistics(tao);
160: *newtao = tao;
161: return(0);
162: }
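/*
   Example usage (a minimal sketch): the typical lifecycle around TaoCreate().
   The vector x holding the initial guess is assumed to be created elsewhere.

     Tao            tao;
     PetscErrorCode ierr;

     ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr);
     ierr = TaoSetType(tao,TAOLMVM);CHKERRQ(ierr);
     ierr = TaoSetInitialVector(tao,x);CHKERRQ(ierr);
     ierr = TaoSetFromOptions(tao);CHKERRQ(ierr);
     ierr = TaoSolve(tao);CHKERRQ(ierr);
     ierr = TaoDestroy(&tao);CHKERRQ(ierr);
*/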
164: /*@
165: TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u
167: Collective on Tao
169: Input Parameters:
170: . tao - the Tao context
172: Notes:
173: The user must set up the Tao with calls to TaoSetInitialVector(),
174: TaoSetObjectiveRoutine(),
175: TaoSetGradientRoutine(), and (if using a second-order method) TaoSetHessianRoutine().
177: You should call TaoGetConvergedReason() or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or
178: why it failed.
180: Level: beginner
182: .seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoGetConvergedReason()
183: @*/
184: PetscErrorCode TaoSolve(Tao tao)
185: {
186: PetscErrorCode ierr;
187: static PetscBool set = PETSC_FALSE;
191: PetscCitationsRegister("@TechReport{tao-user-ref,\n"
192: "title = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
193: "author = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
194: "Institution = {Argonne National Laboratory},\n"
195: "Year = 2014,\n"
196: "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
197: "url = {http://www.mcs.anl.gov/tao}\n}\n",&set);
199: TaoSetUp(tao);
200: TaoResetStatistics(tao);
201: if (tao->linesearch) {
202: TaoLineSearchReset(tao->linesearch);
203: }
205: PetscLogEventBegin(Tao_Solve,tao,0,0,0);
206: if (tao->ops->solve){ (*tao->ops->solve)(tao); }
207: PetscLogEventEnd(Tao_Solve,tao,0,0,0);
209: VecViewFromOptions(tao->solution,(PetscObject)tao,"-tao_view_solution");
211: tao->ntotalits += tao->niter;
212: TaoViewFromOptions(tao,NULL,"-tao_view");
214: if (tao->printreason) {
215: if (tao->reason > 0) {
216: PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);
217: } else {
218: PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iteration %D\n",TaoConvergedReasons[tao->reason],tao->niter);
219: }
220: }
221: return(0);
222: }
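/*
   Example usage (a sketch): a complete TaoSolve() driver with user callbacks.
   FormFunctionGradient is a hypothetical user routine with the
   TaoSetObjectiveAndGradientRoutine() calling sequence, and user is an
   application context assumed to be defined elsewhere.

     Tao                tao;
     TaoConvergedReason reason;
     PetscErrorCode     ierr;

     ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr);
     ierr = TaoSetInitialVector(tao,x);CHKERRQ(ierr);
     ierr = TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);CHKERRQ(ierr);
     ierr = TaoSetFromOptions(tao);CHKERRQ(ierr);
     ierr = TaoSolve(tao);CHKERRQ(ierr);
     ierr = TaoGetConvergedReason(tao,&reason);CHKERRQ(ierr);
     if (reason <= 0) {
       ierr = PetscPrintf(PETSC_COMM_WORLD,"TAO did not converge (reason %d)\n",(int)reason);CHKERRQ(ierr);
     }
     ierr = TaoDestroy(&tao);CHKERRQ(ierr);
*/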
224: /*@
225: TaoSetUp - Sets up the internal data structures for the later use
226: of a Tao solver
228: Collective on Tao
230: Input Parameters:
231: . tao - the TAO context
233: Notes:
234: The user will not need to explicitly call TaoSetUp(), as it will
235: automatically be called in TaoSolve(). However, if the user
236: desires to call it explicitly, it should come after TaoCreate()
237: and any TaoSetSomething() routines, but before TaoSolve().
239: Level: advanced
241: .seealso: TaoCreate(), TaoSolve()
242: @*/
243: PetscErrorCode TaoSetUp(Tao tao)
244: {
249: if (tao->setupcalled) return(0);
251: if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector");
252: if (tao->ops->setup) {
253: (*tao->ops->setup)(tao);
254: }
255: tao->setupcalled = PETSC_TRUE;
256: return(0);
257: }
259: /*@
260: TaoDestroy - Destroys the TAO context that was created with
261: TaoCreate()
263: Collective on Tao
265: Input Parameter:
266: . tao - the Tao context
268: Level: beginner
270: .seealso: TaoCreate(), TaoSolve()
271: @*/
272: PetscErrorCode TaoDestroy(Tao *tao)
273: {
277: if (!*tao) return(0);
279: if (--((PetscObject)*tao)->refct > 0) {*tao=0;return(0);}
281: if ((*tao)->ops->destroy) {
282: (*((*tao))->ops->destroy)(*tao);
283: }
284: KSPDestroy(&(*tao)->ksp);
285: TaoLineSearchDestroy(&(*tao)->linesearch);
287: if ((*tao)->ops->convergencedestroy) {
288: (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);
289: if ((*tao)->jacobian_state_inv) {
290: MatDestroy(&(*tao)->jacobian_state_inv);
291: }
292: }
293: VecDestroy(&(*tao)->solution);
294: VecDestroy(&(*tao)->gradient);
296: if ((*tao)->gradient_norm) {
297: PetscObjectDereference((PetscObject)(*tao)->gradient_norm);
298: VecDestroy(&(*tao)->gradient_norm_tmp);
299: }
301: VecDestroy(&(*tao)->XL);
302: VecDestroy(&(*tao)->XU);
303: VecDestroy(&(*tao)->IL);
304: VecDestroy(&(*tao)->IU);
305: VecDestroy(&(*tao)->DE);
306: VecDestroy(&(*tao)->DI);
307: VecDestroy(&(*tao)->constraints_equality);
308: VecDestroy(&(*tao)->constraints_inequality);
309: VecDestroy(&(*tao)->stepdirection);
310: MatDestroy(&(*tao)->hessian_pre);
311: MatDestroy(&(*tao)->hessian);
312: MatDestroy(&(*tao)->jacobian_pre);
313: MatDestroy(&(*tao)->jacobian);
314: MatDestroy(&(*tao)->jacobian_state_pre);
315: MatDestroy(&(*tao)->jacobian_state);
316: MatDestroy(&(*tao)->jacobian_state_inv);
317: MatDestroy(&(*tao)->jacobian_design);
318: MatDestroy(&(*tao)->jacobian_equality);
319: MatDestroy(&(*tao)->jacobian_equality_pre);
320: MatDestroy(&(*tao)->jacobian_inequality);
321: MatDestroy(&(*tao)->jacobian_inequality_pre);
322: ISDestroy(&(*tao)->state_is);
323: ISDestroy(&(*tao)->design_is);
324: VecDestroy(&(*tao)->sep_weights_v);
325: TaoCancelMonitors(*tao);
326: if ((*tao)->hist_malloc) {
327: PetscFree((*tao)->hist_obj);
328: PetscFree((*tao)->hist_resid);
329: PetscFree((*tao)->hist_cnorm);
330: PetscFree((*tao)->hist_lits);
331: }
332: if ((*tao)->sep_weights_n) {
333: PetscFree((*tao)->sep_weights_rows);
334: PetscFree((*tao)->sep_weights_cols);
335: PetscFree((*tao)->sep_weights_w);
336: }
337: PetscHeaderDestroy(tao);
338: return(0);
339: }
341: /*@
342: TaoSetFromOptions - Sets various Tao parameters from user
343: options.
345: Collective on Tao
347: Input Parameter:
348: . tao - the Tao solver context
350: Options Database Keys:
351: + -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.)
352: . -tao_gatol <gatol> - absolute error tolerance for ||gradient||
353: . -tao_grtol <grtol> - relative error tolerance for ||gradient||
354: . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
355: . -tao_max_it <max> - sets maximum number of iterations
356: . -tao_max_funcs <max> - sets maximum number of function evaluations
357: . -tao_fmin <fmin> - stop if function value reaches fmin
358: . -tao_steptol <tol> - stop if trust region radius less than <tol>
359: . -tao_trust0 <t> - initial trust region radius
360: . -tao_monitor - prints function value and residual at each iteration
361: . -tao_smonitor - same as tao_monitor, but truncates very small values
362: . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
363: . -tao_view_solution - prints solution vector at each iteration
364: . -tao_view_separableobjective - prints separable objective vector at each iteration
365: . -tao_view_step - prints step direction vector at each iteration
366: . -tao_view_gradient - prints gradient vector at each iteration
367: . -tao_draw_solution - graphically view solution vector at each iteration
368: . -tao_draw_step - graphically view step vector at each iteration
369: . -tao_draw_gradient - graphically view gradient at each iteration
370: . -tao_fd_gradient - use gradient computed with finite differences
371: . -tao_fd_hessian - use hessian computed with finite differences
372: . -tao_mf_hessian - use matrix-free hessian computed with finite differences
373: . -tao_cancelmonitors - cancels all monitors (except those set with command line)
374: . -tao_view - prints information about the Tao after solving
375: - -tao_converged_reason - prints the reason TAO stopped iterating
377: Notes:
378: To see all options, run your program with the -help option or consult the
379: user's manual. This routine should be called after TaoCreate() but before TaoSolve().
381: Level: beginner
382: @*/
383: PetscErrorCode TaoSetFromOptions(Tao tao)
384: {
386: TaoType default_type = TAOLMVM;
387: char type[256], monfilename[PETSC_MAX_PATH_LEN];
388: PetscViewer monviewer;
389: PetscBool flg;
390: MPI_Comm comm;
394: PetscObjectGetComm((PetscObject)tao,&comm);
396: /* So no warnings are given about unused options */
397: PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);
399: PetscObjectOptionsBegin((PetscObject)tao);
400: {
401: TaoRegisterAll();
402: if (((PetscObject)tao)->type_name) {
403: default_type = ((PetscObject)tao)->type_name;
404: }
405: /* Check for type from options */
406: PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);
407: if (flg) {
408: TaoSetType(tao,type);
409: } else if (!((PetscObject)tao)->type_name) {
410: TaoSetType(tao,default_type);
411: }
413: PetscOptionsReal("-tao_catol","Stop if constraints violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);
414: if (flg) tao->catol_changed=PETSC_TRUE;
415: PetscOptionsReal("-tao_crtol","Stop if relative contraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);
416: if (flg) tao->crtol_changed=PETSC_TRUE;
417: PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);
418: if (flg) tao->gatol_changed=PETSC_TRUE;
419: PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);
420: if (flg) tao->grtol_changed=PETSC_TRUE;
421: PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);
422: if (flg) tao->gttol_changed=PETSC_TRUE;
423: PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);
424: if (flg) tao->max_it_changed=PETSC_TRUE;
425: PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);
426: if (flg) tao->max_funcs_changed=PETSC_TRUE;
427: PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);
428: if (flg) tao->fmin_changed=PETSC_TRUE;
429: PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);
430: if (flg) tao->steptol_changed=PETSC_TRUE;
431: PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);
432: if (flg) tao->trust0_changed=PETSC_TRUE;
433: PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
434: if (flg) {
435: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
436: TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
437: }
439: PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);
440: PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
441: if (flg) {
442: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
443: TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
444: }
446: PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
447: if (flg) {
448: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
449: TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
450: }
452: PetscOptionsString("-tao_view_separableobjective","view separable objective vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
453: if (flg) {
454: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
455: TaoSetMonitor(tao,TaoSeparableObjectiveMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
456: }
458: PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
459: if (flg) {
460: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
461: TaoSetMonitor(tao,TaoMonitorDefault,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
462: }
464: PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
465: if (flg) {
466: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
467: TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
468: }
470: PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);
471: if (flg) {
472: PetscViewerASCIIOpen(comm,monfilename,&monviewer);
473: TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);
474: }
477: flg = PETSC_FALSE;
478: PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);
479: if (flg) {TaoCancelMonitors(tao);}
481: flg = PETSC_FALSE;
482: PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);
483: if (flg) {
484: TaoMonitorDrawCtx drawctx;
485: PetscInt howoften = 1;
486: TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
487: TaoSetMonitor(tao,TaoDrawSolutionMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
488: }
490: flg = PETSC_FALSE;
491: PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);
492: if (flg) {
493: TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);
494: }
496: flg = PETSC_FALSE;
497: PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);
498: if (flg) {
499: TaoMonitorDrawCtx drawctx;
500: PetscInt howoften = 1;
501: TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&drawctx);
502: TaoSetMonitor(tao,TaoDrawGradientMonitor,drawctx,(PetscErrorCode (*)(void**))TaoMonitorDrawCtxDestroy);
503: }
504: flg = PETSC_FALSE;
505: PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);
506: if (flg) {
507: TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);
508: }
509: flg = PETSC_FALSE;
510: PetscOptionsBool("-tao_fd_hessian","compute hessian using finite differences","TaoDefaultComputeHessian",flg,&flg,NULL);
511: if (flg) {
512: Mat H;
514: MatCreate(PetscObjectComm((PetscObject)tao),&H);
515: MatSetType(H,MATAIJ);
516: TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessian,NULL);
517: MatDestroy(&H);
518: }
519: flg = PETSC_FALSE;
520: PetscOptionsBool("-tao_mf_hessian","compute matrix-free hessian using finite differences","TaoDefaultComputeHessianMFFD",flg,&flg,NULL);
521: if (flg) {
522: Mat H;
524: MatCreate(PetscObjectComm((PetscObject)tao),&H);
525: TaoSetHessianRoutine(tao,H,H,TaoDefaultComputeHessianMFFD,NULL);
526: MatDestroy(&H);
527: }
528: PetscOptionsEnum("-tao_subset_type","subset type","",TaoSubSetTypes,(PetscEnum)tao->subset_type,(PetscEnum*)&tao->subset_type,NULL);
530: if (tao->ops->setfromoptions) {
531: (*tao->ops->setfromoptions)(PetscOptionsObject,tao);
532: }
533: }
534: PetscOptionsEnd();
535: return(0);
536: }
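/*
   Example usage (a sketch): with TaoSetFromOptions() called, the options listed
   above can be combined at run time; the executable name ./myprog is hypothetical.

     ./myprog -tao_type ntr -tao_gatol 1e-10 -tao_max_it 200 -tao_monitor -tao_view
*/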
538: /*@C
539: TaoView - Prints information about the Tao
541: Collective on Tao
543: Input Parameters:
544: + tao - the Tao context
545: - viewer - visualization context
547: Options Database Key:
548: . -tao_view - Calls TaoView() at the end of TaoSolve()
550: Notes:
551: The available visualization contexts include
552: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
553: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
554: output where only the first processor opens
555: the file. All other processors send their
556: data to the first processor to print.
558: Level: beginner
560: .seealso: PetscViewerASCIIOpen()
561: @*/
562: PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
563: {
564: PetscErrorCode ierr;
565: PetscBool isascii,isstring;
566: const TaoType type;
570: if (!viewer) {
571: PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);
572: }
576: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
577: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
578: if (isascii) {
579: PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);
581: if (tao->ops->view) {
582: PetscViewerASCIIPushTab(viewer);
583: (*tao->ops->view)(tao,viewer);
584: PetscViewerASCIIPopTab(viewer);
585: }
586: if (tao->linesearch) {
587: PetscViewerASCIIPushTab(viewer);
588: TaoLineSearchView(tao->linesearch,viewer);
589: PetscViewerASCIIPopTab(viewer);
590: }
591: if (tao->ksp) {
592: PetscViewerASCIIPushTab(viewer);
593: KSPView(tao->ksp,viewer);
594: PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);
595: PetscViewerASCIIPopTab(viewer);
596: }
598: PetscViewerASCIIPushTab(viewer);
600: if (tao->XL || tao->XU) {
601: PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);
602: }
604: PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);
605: PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);
606: PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);
607: PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);
609: if (tao->cnorm>0 || tao->catol>0 || tao->crtol>0){
610: ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances:");
611: ierr=PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);
612: ierr=PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);
613: PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);
614: }
616: if (tao->trust < tao->steptol){
617: ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);
618: ierr=PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);
619: }
621: if (tao->fmin>-1.e25){
622: ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);
623: }
624: PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);
626: PetscViewerASCIIPrintf(viewer,"total number of iterations=%D, ",tao->niter);
627: PetscViewerASCIIPrintf(viewer," (max: %D)\n",tao->max_it);
629: if (tao->nfuncs>0){
630: PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);
631: PetscViewerASCIIPrintf(viewer," max: %D\n",tao->max_funcs);
632: }
633: if (tao->ngrads>0){
634: PetscViewerASCIIPrintf(viewer,"total number of gradient evaluations=%D,",tao->ngrads);
635: PetscViewerASCIIPrintf(viewer," max: %D\n",tao->max_funcs);
636: }
637: if (tao->nfuncgrads>0){
638: PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);
639: PetscViewerASCIIPrintf(viewer," (max: %D)\n",tao->max_funcs);
640: }
641: if (tao->nhess>0){
642: PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);
643: }
644: /* if (tao->linear_its>0){
645: PetscViewerASCIIPrintf(viewer," total Krylov method iterations=%D\n",tao->linear_its);
646: }*/
647: if (tao->nconstraints>0){
648: PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);
649: }
650: if (tao->njac>0){
651: PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);
652: }
654: if (tao->reason>0){
655: PetscViewerASCIIPrintf(viewer, "Solution converged: ");
656: switch (tao->reason) {
657: case TAO_CONVERGED_GATOL:
658: PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");
659: break;
660: case TAO_CONVERGED_GRTOL:
661: PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");
662: break;
663: case TAO_CONVERGED_GTTOL:
664: PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");
665: break;
666: case TAO_CONVERGED_STEPTOL:
667: PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");
668: break;
669: case TAO_CONVERGED_MINF:
670: PetscViewerASCIIPrintf(viewer," Minf -- f < fmin\n");
671: break;
672: case TAO_CONVERGED_USER:
673: PetscViewerASCIIPrintf(viewer," User Terminated\n");
674: break;
675: default:
676: PetscViewerASCIIPrintf(viewer,"\n");
677: break;
678: }
680: } else {
681: PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);
682: switch (tao->reason) {
683: case TAO_DIVERGED_MAXITS:
684: PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");
685: break;
686: case TAO_DIVERGED_NAN:
687: PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");
688: break;
689: case TAO_DIVERGED_MAXFCN:
690: PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");
691: break;
692: case TAO_DIVERGED_LS_FAILURE:
693: PetscViewerASCIIPrintf(viewer," Line Search Failure\n");
694: break;
695: case TAO_DIVERGED_TR_REDUCTION:
696: PetscViewerASCIIPrintf(viewer," Trust Region too small\n");
697: break;
698: case TAO_DIVERGED_USER:
699: PetscViewerASCIIPrintf(viewer," User Terminated\n");
700: break;
701: default:
702: PetscViewerASCIIPrintf(viewer,"\n");
703: break;
704: }
705: }
706: PetscViewerASCIIPopTab(viewer);
707: } else if (isstring) {
708: TaoGetType(tao,&type);
709: PetscViewerStringSPrintf(viewer," %-3.3s",type);
710: }
711: return(0);
712: }
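/*
   Example usage (a sketch): view the solver explicitly on standard output,
   equivalent to running with -tao_view.

     ierr = TaoView(tao,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/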
714: /*@
715: TaoSetTolerances - Sets parameters used in TAO convergence tests
717: Logically collective on Tao
719: Input Parameters:
720: + tao - the Tao context
721: . gatol - stop if norm of gradient is less than this
722: . grtol - stop if relative norm of gradient is less than this
723: - gttol - stop if norm of gradient is reduced by this factor
725: Options Database Keys:
726: + -tao_gatol <gatol> - Sets gatol
727: . -tao_grtol <grtol> - Sets grtol
728: - -tao_gttol <gttol> - Sets gttol
730: Stopping Criteria:
731: $ ||g(X)|| <= gatol
732: $ ||g(X)|| / |f(X)| <= grtol
733: $ ||g(X)|| / ||g(X0)|| <= gttol
735: Notes:
736: Use PETSC_DEFAULT to leave one or more tolerances unchanged.
738: Level: beginner
740: .seealso: TaoGetTolerances()
742: @*/
743: PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
744: {
750: if (gatol != PETSC_DEFAULT) {
751: if (gatol<0) {
752: PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");
753: } else {
754: tao->gatol = PetscMax(0,gatol);
755: tao->gatol_changed=PETSC_TRUE;
756: }
757: }
759: if (grtol != PETSC_DEFAULT) {
760: if (grtol<0) {
761: PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");
762: } else {
763: tao->grtol = PetscMax(0,grtol);
764: tao->grtol_changed=PETSC_TRUE;
765: }
766: }
768: if (gttol != PETSC_DEFAULT) {
769: if (gttol<0) {
770: PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");
771: } else {
772: tao->gttol = PetscMax(0,gttol);
773: tao->gttol_changed=PETSC_TRUE;
774: }
775: }
776: return(0);
777: }
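/*
   Example usage (a sketch): tighten only the absolute gradient tolerance and
   leave the relative tolerances unchanged by passing PETSC_DEFAULT.

     ierr = TaoSetTolerances(tao,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
*/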
779: /*@
780: TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO convergence tests
782: Logically collective on Tao
784: Input Parameters:
785: + tao - the Tao context
786: . catol - absolute constraint tolerance, constraint norm must be less than catol for the gatol convergence criterion to apply
787: - crtol - relative constraint tolerance, constraint norm must be less than crtol for the gatol, gttol convergence criteria to apply
789: Options Database Keys:
790: + -tao_catol <catol> - Sets catol
791: - -tao_crtol <crtol> - Sets crtol
793: Notes:
794: Use PETSC_DEFAULT to leave any tolerance unchanged.
796: Level: intermediate
798: .seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()
800: @*/
801: PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
802: {
808: if (catol != PETSC_DEFAULT) {
809: if (catol<0) {
810: PetscInfo(tao,"Tried to set negative catol -- ignored.\n");
811: } else {
812: tao->catol = PetscMax(0,catol);
813: tao->catol_changed=PETSC_TRUE;
814: }
815: }
817: if (crtol != PETSC_DEFAULT) {
818: if (crtol<0) {
819: PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");
820: } else {
821: tao->crtol = PetscMax(0,crtol);
822: tao->crtol_changed=PETSC_TRUE;
823: }
824: }
825: return(0);
826: }
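/*
   Example usage (a sketch): require an absolute constraint norm below 1e-6
   while leaving the relative constraint tolerance unchanged.

     ierr = TaoSetConstraintTolerances(tao,1.e-6,PETSC_DEFAULT);CHKERRQ(ierr);
*/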
828: /*@
829: TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO convergence tests
831: Not Collective
833: Input Parameter:
834: . tao - the Tao context
836: Output Parameters:
837: + catol - absolute constraint tolerance, constraint norm must be less than catol for the gatol convergence criterion to apply
838: - crtol - relative constraint tolerance, constraint norm must be less than crtol for the gatol, gttol convergence criteria to apply
840: Level: intermediate
842: .seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()
844: @*/
845: PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
846: {
849: if (catol) *catol = tao->catol;
850: if (crtol) *crtol = tao->crtol;
851: return(0);
852: }
854: /*@
855: TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
856: When an approximate solution with an objective value below this number
857: has been found, the solver will terminate.
859: Logically Collective on Tao
861: Input Parameters:
862: + tao - the Tao solver context
863: - fmin - the tolerance
865: Options Database Keys:
866: . -tao_fmin <fmin> - sets the minimum function value
868: Level: intermediate
870: .seealso: TaoSetTolerances()
871: @*/
872: PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin)
873: {
876: tao->fmin = fmin;
877: tao->fmin_changed=PETSC_TRUE;
878: return(0);
879: }
881: /*@
882: TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
883: When an approximate solution with an objective value below this number
884: has been found, the solver will terminate.
886: Not collective on Tao
888: Input Parameters:
889: . tao - the Tao solver context
891: Output Parameters:
892: . fmin - the minimum function value
894: Level: intermediate
896: .seealso: TaoSetFunctionLowerBound()
897: @*/
898: PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin)
899: {
902: *fmin = tao->fmin;
903: return(0);
904: }
906: /*@
907: TaoSetMaximumFunctionEvaluations - Sets a maximum number of
908: function evaluations.
910: Logically Collective on Tao
912: Input Parameters:
913: + tao - the Tao solver context
914: - nfcn - the maximum number of function evaluations (>=0)
916: Options Database Keys:
917: . -tao_max_funcs <nfcn> - sets the maximum number of function evaluations
919: Level: intermediate
921: .seealso: TaoSetTolerances(), TaoSetMaximumIterations()
922: @*/
924: PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn)
925: {
928: tao->max_funcs = PetscMax(0,nfcn);
929: tao->max_funcs_changed=PETSC_TRUE;
930: return(0);
931: }
933: /*@
934: TaoGetMaximumFunctionEvaluations - Gets the maximum number of
935: function evaluations.
937: Not Collective
939: Input Parameters:
940: . tao - the Tao solver context
942: Output Parameters:
943: . nfcn - the maximum number of function evaluations
945: Level: intermediate
947: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
948: @*/
950: PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn)
951: {
954: *nfcn = tao->max_funcs;
955: return(0);
956: }
958: /*@
959: TaoGetCurrentFunctionEvaluations - Gets the current number of
960: function evaluations.
962: Not Collective
964: Input Parameters:
965: . tao - the Tao solver context
967: Output Parameters:
968: . nfuncs - the current number of function evaluations
970: Level: intermediate
972: .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
973: @*/
975: PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs)
976: {
979: *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
980: return(0);
981: }
983: /*@
984: TaoSetMaximumIterations - Sets a maximum number of iterates.
986: Logically Collective on Tao
988: Input Parameters:
989: + tao - the Tao solver context
990: - maxits - the maximum number of iterates (>=0)
992: Options Database Keys:
993: . -tao_max_it <its> - sets the maximum number of iterations
995: Level: intermediate
997: .seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations()
998: @*/
999: PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits)
1000: {
1003: tao->max_it = PetscMax(0,maxits);
1004: tao->max_it_changed=PETSC_TRUE;
1005: return(0);
1006: }
1008: /*@
1009: TaoGetMaximumIterations - Gets the maximum number of iterates.
1011: Not Collective
1013: Input Parameters:
1014: . tao - the Tao solver context
1016: Output Parameters:
1017: . maxits - the maximum number of iterates
1019: Level: intermediate
1021: .seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations()
1022: @*/
1023: PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits)
1024: {
1027: *maxits = tao->max_it;
1028: return(0);
1029: }
1031: /*@
1032: TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.
1034: Logically collective on Tao
1036: Input Parameter:
1037: + tao - a TAO optimization solver
1038: - radius - the trust region radius
1040: Level: intermediate
1042: Options Database Key:
1043: . -tao_trust0 <t0> - sets initial trust region radius
1045: .seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance()
1046: @*/
1047: PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
1048: {
1051: tao->trust0 = PetscMax(0.0,radius);
1052: tao->trust0_changed=PETSC_TRUE;
1053: return(0);
1054: }
1056: /*@
1057: TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.
1059: Not Collective
1061: Input Parameter:
1062: . tao - a TAO optimization solver
1064: Output Parameter:
1065: . radius - the trust region radius
1067: Level: intermediate
1069: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius()
1070: @*/
1071: PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius)
1072: {
1075: *radius = tao->trust0;
1076: return(0);
1077: }
1079: /*@
1080: TaoGetCurrentTrustRegionRadius - Gets the current trust region radius.
1082: Not Collective
1084: Input Parameter:
1085: . tao - a TAO optimization solver
1087: Output Parameter:
1088: . radius - the trust region radius
1090: Level: intermediate
1092: .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius()
1093: @*/
1094: PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius)
1095: {
1098: *radius = tao->trust;
1099: return(0);
1100: }
1102: /*@
1103: TaoGetTolerances - gets the current values of tolerances
1105: Not Collective
1107: Input Parameters:
1108: . tao - the Tao context
1110: Output Parameters:
1111: + gatol - stop if norm of gradient is less than this
1112: . grtol - stop if relative norm of gradient is less than this
1113: - gttol - stop if norm of gradient is reduced by this factor
1115: Note: NULL can be used as an argument if not all tolerances values are needed
1117: .seealso: TaoSetTolerances()
1119: Level: intermediate
1120: @*/
1121: PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol)
1122: {
1125: if (gatol) *gatol=tao->gatol;
1126: if (grtol) *grtol=tao->grtol;
1127: if (gttol) *gttol=tao->gttol;
1128: return(0);
1129: }
1131: /*@
1132: TaoGetKSP - Gets the linear solver used by the optimization solver.
1133: Application writers should use TaoGetKSP if they need direct access
1134: to the PETSc KSP object.
1136: Not Collective
1138: Input Parameters:
1139: . tao - the TAO solver
1141: Output Parameters:
1142: . ksp - the KSP linear solver used in the optimization solver
1144: Level: intermediate
1146: @*/
1147: PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp)
1148: {
1150: *ksp = tao->ksp;
1151: return(0);
1152: }
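/*
   Example usage (a sketch): fetch the inner KSP of a Newton-type solver and
   customize it directly; ksp may be NULL for methods that use no linear solver,
   so it is checked before use.

     KSP ksp;
     PC  pc;

     ierr = TaoGetKSP(tao,&ksp);CHKERRQ(ierr);
     if (ksp) {
       ierr = KSPSetType(ksp,KSPCG);CHKERRQ(ierr);
       ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
       ierr = PCSetType(pc,PCJACOBI);CHKERRQ(ierr);
     }
*/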
1154: /*@
1155: TaoGetLinearSolveIterations - Gets the total number of linear iterations
1156: used by the TAO solver
1158: Not Collective
1160: Input Parameter:
1161: . tao - TAO context
1163: Output Parameter:
1164: . lits - number of linear iterations
1166: Notes:
1167: This counter is reset to zero for each successive call to TaoSolve()
1169: Level: intermediate
1171: .keywords: TAO
1173: .seealso: TaoGetKSP()
1174: @*/
1175: PetscErrorCode TaoGetLinearSolveIterations(Tao tao,PetscInt *lits)
1176: {
1180: *lits = tao->ksp_tot_its;
1181: return(0);
1182: }
1184: /*@
1185: TaoGetLineSearch - Gets the line search used by the optimization solver.
1186: Application writers should use TaoGetLineSearch if they need direct access
1187: to the TaoLineSearch object.
1189: Not Collective
1191: Input Parameters:
1192: . tao - the TAO solver
1194: Output Parameters:
1195: . ls - the line search used in the optimization solver
1197: Level: intermediate
1199: @*/
1200: PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls)
1201: {
1203: *ls = tao->linesearch;
1204: return(0);
1205: }
1207: /*@
1208: TaoAddLineSearchCounts - Adds the number of function evaluations spent
1209: in the line search to the running total.
1211: Input Parameter:
1212: . tao - the TAO solver
1215: Level: developer
1217: .seealso: TaoLineSearchApply()
1218: @*/
1219: PetscErrorCode TaoAddLineSearchCounts(Tao tao)
1220: {
1222: PetscBool flg;
1223: PetscInt nfeval,ngeval,nfgeval;
1227: if (tao->linesearch) {
1228: TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);
1229: if (!flg) {
1230: TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);
1231: tao->nfuncs+=nfeval;
1232: tao->ngrads+=ngeval;
1233: tao->nfuncgrads+=nfgeval;
1234: }
1235: }
1236: return(0);
1237: }
1239: /*@
1240: TaoGetSolutionVector - Returns the vector with the current TAO solution
1242: Not Collective
1244: Input Parameter:
1245: . tao - the Tao context
1247: Output Parameter:
1248: . X - the current solution
1250: Level: intermediate
1252: Note: The returned vector will be the same object that was passed into TaoSetInitialVector()
1253: @*/
1254: PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X)
1255: {
1258: *X = tao->solution;
1259: return(0);
1260: }
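/*
   Example usage (a sketch): inspect the current iterate after TaoSolve(); the
   returned Vec is a reference, not a copy, so it must not be destroyed here.

     Vec X;

     ierr = TaoGetSolutionVector(tao,&X);CHKERRQ(ierr);
     ierr = VecView(X,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/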
1262: /*@
1263: TaoGetGradientVector - Returns the vector with the current TAO gradient
1265: Not Collective
1267: Input Parameter:
1268: . tao - the Tao context
1270: Output Parameter:
1271: . G - the current gradient
1273: Level: intermediate
1274: @*/
1275: PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G)
1276: {
1279: *G = tao->gradient;
1280: return(0);
1281: }
1283: /*@
1284: TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers.
1285: These statistics include the iteration number, residual norms, and convergence status.
1286: This routine gets called before solving each optimization problem.
1288: Collective on Tao
1290: Input Parameters:
1291: . solver - the Tao context
1293: Level: developer
1295: .seealso: TaoCreate(), TaoSolve()
1296: @*/
1297: PetscErrorCode TaoResetStatistics(Tao tao)
1298: {
1301: tao->niter = 0;
1302: tao->nfuncs = 0;
1303: tao->nfuncgrads = 0;
1304: tao->ngrads = 0;
1305: tao->nhess = 0;
1306: tao->njac = 0;
1307: tao->nconstraints = 0;
1308: tao->ksp_its = 0;
1309: tao->ksp_tot_its = 0;
1310: tao->reason = TAO_CONTINUE_ITERATING;
1311: tao->residual = 0.0;
1312: tao->cnorm = 0.0;
1313: tao->step = 0.0;
1314: tao->lsflag = PETSC_FALSE;
1315: if (tao->hist_reset) tao->hist_len=0;
1316: return(0);
1317: }
1319: /*@C
1320: TaoSetConvergenceTest - Sets the function that is to be used to test
1321: for convergence of the iterative minimization solution. The new convergence
1322: testing routine will replace TAO's default convergence test.
1324: Logically Collective on Tao
1326: Input Parameters:
1327: + tao - the Tao object
1328: . conv - the routine to test for convergence
1329: - ctx - [optional] context for private data for the convergence routine
1330: (may be NULL)
1332: Calling sequence of conv:
1333: $ PetscErrorCode conv(Tao tao, void *ctx)
1335: + tao - the Tao object
1336: - ctx - [optional] convergence context
1338: Note: The new convergence testing routine should call TaoSetConvergedReason().
1340: Level: advanced
1342: .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor
1344: @*/
1345: PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx)
1346: {
1349: (tao)->ops->convergencetest = conv;
1350: (tao)->cnvP = ctx;
1351: return(0);
1352: }
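/*
   Example usage (a sketch): a hypothetical user convergence test MyConvergenceTest
   with the calling sequence documented above; it reports its decision through
   TaoSetConvergedReason().

     PetscErrorCode MyConvergenceTest(Tao tao,void *ctx)
     {
       PetscInt           its;
       PetscReal          f,gnorm,cnorm,xdiff;
       TaoConvergedReason reason;
       PetscErrorCode     ierr;

       ierr = TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);CHKERRQ(ierr);
       if (gnorm < 1.e-7) {
         ierr = TaoSetConvergedReason(tao,TAO_CONVERGED_USER);CHKERRQ(ierr);
       } else if (its > 50) {
         ierr = TaoSetConvergedReason(tao,TAO_DIVERGED_USER);CHKERRQ(ierr);
       } else {
         ierr = TaoSetConvergedReason(tao,TAO_CONTINUE_ITERATING);CHKERRQ(ierr);
       }
       return 0;
     }

   and registered with

     ierr = TaoSetConvergenceTest(tao,MyConvergenceTest,NULL);CHKERRQ(ierr);
*/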
1354: /*@C
1355: TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every
1356: iteration of the solver to display the iteration's
1357: progress.
1359: Logically Collective on Tao
1361: Input Parameters:
1362: + tao - the Tao solver context
1363: . mymonitor - monitoring routine
1364: - mctx - [optional] user-defined context for private data for the
1365: monitor routine (may be NULL)
1367: Calling sequence of mymonitor:
1368: $ PetscErrorCode mymonitor(Tao tao,void *mctx)
1370: + tao - the Tao solver context
1371: - mctx - [optional] monitoring context
1374: Options Database Keys:
1375: + -tao_monitor - sets TaoMonitorDefault()
1376: . -tao_smonitor - sets short monitor
1377: . -tao_cmonitor - same as smonitor plus constraint norm
1378: . -tao_view_solution - view solution at each iteration
1379: . -tao_view_gradient - view gradient at each iteration
1380: . -tao_view_separableobjective - view separable objective function at each iteration
1381: - -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database.
1384: Notes:
1385: Several different monitoring routines may be set by calling
1386: TaoSetMonitor() multiple times; all will be called in the
1387: order in which they were set.
1389: Fortran Notes: Only one monitor function may be set
1391: Level: intermediate
1393: .seealso: TaoMonitorDefault(), TaoCancelMonitors(), TaoSetDestroyRoutine()
1394: @*/
1395: PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**))
1396: {
1398: PetscInt i;
1402: if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PETSC_COMM_SELF,1,"Cannot attach another monitor -- max=%d",MAXTAOMONITORS);
1404: for (i=0; i<tao->numbermonitors;i++) {
1405: if (func == tao->monitor[i] && dest == tao->monitordestroy[i] && ctx == tao->monitorcontext[i]) {
1406: if (dest) {
1407: (*dest)(&ctx);
1408: }
1409: return(0);
1410: }
1411: }
1412: tao->monitor[tao->numbermonitors] = func;
1413: tao->monitorcontext[tao->numbermonitors] = ctx;
1414: tao->monitordestroy[tao->numbermonitors] = dest;
1415: ++tao->numbermonitors;
1416: return(0);
1417: }
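/*
   Example usage (a sketch): a hypothetical monitor MyMonitor with the calling
   sequence documented above, printing the iteration number, objective value,
   and residual norm.

     PetscErrorCode MyMonitor(Tao tao,void *mctx)
     {
       PetscInt           its;
       PetscReal          f,gnorm,cnorm,xdiff;
       TaoConvergedReason reason;
       PetscErrorCode     ierr;

       ierr = TaoGetSolutionStatus(tao,&its,&f,&gnorm,&cnorm,&xdiff,&reason);CHKERRQ(ierr);
       ierr = PetscPrintf(PetscObjectComm((PetscObject)tao),"%D: f=%g  ||g||=%g\n",its,(double)f,(double)gnorm);CHKERRQ(ierr);
       return 0;
     }

   attached (with no context and no destroy routine) by

     ierr = TaoSetMonitor(tao,MyMonitor,NULL,NULL);CHKERRQ(ierr);
*/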
1419: /*@
1420: TaoCancelMonitors - Clears all the monitor functions for a Tao object.
1422: Logically Collective on Tao
1424: Input Parameters:
1425: . tao - the Tao solver context
1427: Options Database:
1428: . -tao_cancelmonitors - cancels all monitors that have been hardwired
1429: into a code by calls to TaoSetMonitor(), but does not cancel those
1430: set via the options database
1432: Notes:
1433: There is no way to clear one specific monitor from a Tao object.
1435: Level: advanced
1437: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1438: @*/
1439: PetscErrorCode TaoCancelMonitors(Tao tao)
1440: {
1441: PetscInt i;
1446: for (i=0;i<tao->numbermonitors;i++) {
1447: if (tao->monitordestroy[i]) {
1448: (*tao->monitordestroy[i])(&tao->monitorcontext[i]);
1449: }
1450: }
1451: tao->numbermonitors=0;
1452: return(0);
1453: }
1455: /*@
1456: TaoMonitorDefault - Default routine for monitoring progress of the
1457: Tao solvers (default). This monitor prints the function value and gradient
1458: norm at each iteration. It can be turned on from the command line using the
1459: -tao_monitor option
1461: Collective on Tao
1463: Input Parameters:
1464: + tao - the Tao context
1465: - ctx - PetscViewer context or NULL
1467: Options Database Keys:
1468: . -tao_monitor
1470: Level: advanced
1472: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1473: @*/
1474: PetscErrorCode TaoMonitorDefault(Tao tao, void *ctx)
1475: {
1477: PetscInt its, tabs;
1478: PetscReal fct,gnorm;
1479: PetscViewer viewer = (PetscViewer)ctx;
1483: its=tao->niter;
1484: fct=tao->fc;
1485: gnorm=tao->residual;
1486: PetscViewerASCIIGetTab(viewer, &tabs);
1487: PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1488: if (its == 0 && ((PetscObject)tao)->prefix) {
1489: PetscViewerASCIIPrintf(viewer," Iteration information for %s solve.\n",((PetscObject)tao)->prefix);
1490: }
1491: ierr=PetscViewerASCIIPrintf(viewer,"%3D TAO,",its);
1492: ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1493: if (gnorm >= PETSC_INFINITY) {
1494: ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");
1495: } else {
1496: ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);
1497: }
1498: PetscViewerASCIISetTab(viewer, tabs);
1499: return(0);
1500: }
1502: /*@
1503: TaoDefaultSMonitor - Default routine for monitoring progress of the
1504: solver. Same as TaoMonitorDefault() except
1505: it prints fewer digits of the residual as the residual gets smaller.
1506: This is because the later digits are meaningless and are often
1507: different on different machines; by using this routine different
1508: machines will usually generate the same output. It can be turned on
1509: by using the -tao_smonitor option
1511: Collective on Tao
1513: Input Parameters:
1514: + tao - the Tao context
1515: - ctx - PetscViewer context of type ASCII
1517: Options Database Keys:
1518: . -tao_smonitor
1520: Level: advanced
1522: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1523: @*/
1524: PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
1525: {
1527: PetscInt its;
1528: PetscReal fct,gnorm;
1529: PetscViewer viewer = (PetscViewer)ctx;
1533: its=tao->niter;
1534: fct=tao->fc;
1535: gnorm=tao->residual;
1536: ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);
1537: ierr=PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);
1538: if (gnorm >= PETSC_INFINITY) {
1539: ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");
1540: } else if (gnorm > 1.e-6) {
1541: ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);
1542: } else if (gnorm > 1.e-11) {
1543: ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");
1544: } else {
1545: ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");
1546: }
1547: return(0);
1548: }
1550: /*@
1551: TaoDefaultCMonitor - same as TaoMonitorDefault() except
1552: it prints the norm of the constraints function. It can be turned on
1553: from the command line using the -tao_cmonitor option
1555: Collective on Tao
1557: Input Parameters:
1558: + tao - the Tao context
1559: - ctx - PetscViewer context or NULL
1561: Options Database Keys:
1562: . -tao_cmonitor
1564: Level: advanced
1566: .seealso: TaoMonitorDefault(), TaoSetMonitor()
1567: @*/
1568: PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx)
1569: {
1571: PetscInt its;
1572: PetscReal fct,gnorm;
1573: PetscViewer viewer = (PetscViewer)ctx;
1577: its=tao->niter;
1578: fct=tao->fc;
1579: gnorm=tao->residual;
1580: ierr=PetscViewerASCIIPrintf(viewer,"iter = %D,",its);
1581: ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);
1582: ierr=PetscViewerASCIIPrintf(viewer," Residual: %g ",(double)gnorm);
1583: PetscViewerASCIIPrintf(viewer," Constraint: %g \n",(double)tao->cnorm);
1584: return(0);
1585: }
1587: /*@C
1588: TaoSolutionMonitor - Views the solution at each iteration
1589: It can be turned on from the command line using the
1590: -tao_view_solution option
1592: Collective on Tao
1594: Input Parameters:
1595: + tao - the Tao context
1596: - ctx - PetscViewer context or NULL
1598: Options Database Keys:
1599: . -tao_view_solution
1601: Level: advanced
1603: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1604: @*/
1605: PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx)
1606: {
1608: PetscViewer viewer = (PetscViewer)ctx;
1612: VecView(tao->solution, viewer);
1613: return(0);
1614: }
1616: /*@C
1617: TaoGradientMonitor - Views the gradient at each iteration
1618: It can be turned on from the command line using the
1619: -tao_view_gradient option
1621: Collective on Tao
1623: Input Parameters:
1624: + tao - the Tao context
1625: - ctx - PetscViewer context or NULL
1627: Options Database Keys:
1628: . -tao_view_gradient
1630: Level: advanced
1632: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1633: @*/
1634: PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx)
1635: {
1637: PetscViewer viewer = (PetscViewer)ctx;
1641: VecView(tao->gradient, viewer);
1642: return(0);
1643: }
1645: /*@C
1646: TaoStepDirectionMonitor - Views the step direction at each iteration
1647: It can be turned on from the command line using the
1648: -tao_view_stepdirection option
1650: Collective on Tao
1652: Input Parameters:
1653: + tao - the Tao context
1654: - ctx - PetscViewer context or NULL
1656: Options Database Keys:
1657: . -tao_view_stepdirection
1659: Level: advanced
1661: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1662: @*/
1663: PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx)
1664: {
1666: PetscViewer viewer = (PetscViewer)ctx;
1670: VecView(tao->stepdirection, viewer);
1671: return(0);
1672: }
1674: /*@C
1675: TaoDrawSolutionMonitor - Plots the solution at each iteration
1676: It can be turned on from the command line using the
1677: -tao_draw_solution option
1679: Collective on Tao
1681: Input Parameters:
1682: + tao - the Tao context
1683: - ctx - TaoMonitorDraw context
1685: Options Database Keys:
1686: . -tao_draw_solution
1688: Level: advanced
1690: .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor
1691: @*/
1692: PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx)
1693: {
1694: PetscErrorCode ierr;
1695: TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;
1698: if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1699: VecView(tao->solution,ictx->viewer);
1700: return(0);
1701: }
1703: /*@C
1704: TaoDrawGradientMonitor - Plots the gradient at each iteration
1705: It can be turned on from the command line using the
1706: -tao_draw_gradient option
1708: Collective on Tao
1710: Input Parameters:
1711: + tao - the Tao context
1712: - ctx - PetscViewer context
1714: Options Database Keys:
1715: . -tao_draw_gradient
1717: Level: advanced
1719: .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor
1720: @*/
1721: PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx)
1722: {
1723: PetscErrorCode ierr;
1724: TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;
1727: if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return(0);
1728: VecView(tao->gradient,ictx->viewer);
1729: return(0);
1730: }
1732: /*@C
1733: TaoDrawStepMonitor - Plots the step direction at each iteration
1734: It can be turned on from the command line using the
1735: -tao_draw_step option
1737: Collective on Tao
1739: Input Parameters:
1740: + tao - the Tao context
1741: - ctx - PetscViewer context
1743: Options Database Keys:
1744: . -tao_draw_step
1746: Level: advanced
1748: .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor
1749: @*/
1750: PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx)
1751: {
1753: PetscViewer viewer = (PetscViewer)(ctx);
1756: VecView(tao->stepdirection, viewer);
1757: return(0);
1758: }
1760: /*@C
1761: TaoSeparableObjectiveMonitor - Views the separable objective function at each iteration
1762: It can be turned on from the command line using the
1763: -tao_view_separableobjective option
1765: Collective on Tao
1767: Input Parameters:
1768: + tao - the Tao context
1769: - ctx - PetscViewer context or NULL
1771: Options Database Keys:
1772: . -tao_view_separableobjective
1774: Level: advanced
1776: .seealso: TaoDefaultSMonitor(), TaoSetMonitor()
1777: @*/
1778: PetscErrorCode TaoSeparableObjectiveMonitor(Tao tao, void *ctx)
1779: {
1781: PetscViewer viewer = (PetscViewer)ctx;
1785: VecView(tao->sep_objective,viewer);
1786: return(0);
1787: }
1789: /*@
1790: TaoDefaultConvergenceTest - Determines whether the solver should continue iterating
1791: or terminate.
1793: Collective on Tao
1795: Input Parameters:
1796: + tao - the Tao context
1797: - dummy - unused dummy context
1799: Output Parameter:
1800: . reason - the reason for terminating
1802: Notes:
1803: This routine checks the residual in the optimality conditions, the
1804: relative residual in the optimality conditions, the number of function
1805: evaluations, and the function value to test convergence. Some
1806: solvers may use different convergence routines.
1808: Level: developer
1810: .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason()
1811: @*/
1813: PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy)
1814: {
1815: PetscInt niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
1816: PetscInt max_funcs=tao->max_funcs;
1817: PetscReal gnorm=tao->residual, gnorm0=tao->gnorm0;
1818: PetscReal f=tao->fc, steptol=tao->steptol,trradius=tao->step;
1819: PetscReal gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol;
1820: PetscReal catol=tao->catol,crtol=tao->crtol;
1821: PetscReal fmin=tao->fmin, cnorm=tao->cnorm;
1822: TaoConvergedReason reason=tao->reason;
1823: PetscErrorCode ierr;
1827: if (reason != TAO_CONTINUE_ITERATING) {
1828: return(0);
1829: }
1831: if (PetscIsInfOrNanReal(f)) {
1832: PetscInfo(tao,"Failed to converged, function value is Inf or NaN\n");
1833: reason = TAO_DIVERGED_NAN;
1834: } else if (f <= fmin && cnorm <=catol) {
1835: PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);
1836: reason = TAO_CONVERGED_MINF;
1837: } else if (gnorm<= gatol && cnorm <=catol) {
1838: PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);
1839: reason = TAO_CONVERGED_GATOL;
1840: } else if ( f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) {
1841: PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);
1842: reason = TAO_CONVERGED_GRTOL;
1843: } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) {
1844: PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);
1845: reason = TAO_CONVERGED_GTTOL;
1846: } else if (nfuncs > max_funcs){
1847: PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);
1848: reason = TAO_DIVERGED_MAXFCN;
1849: } else if ( tao->lsflag != 0 ){
1850: PetscInfo(tao,"Tao Line Search failure.\n");
1851: reason = TAO_DIVERGED_LS_FAILURE;
1852: } else if (trradius < steptol && niter > 0){
1853: PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);
1854: reason = TAO_CONVERGED_STEPTOL;
1855: } else if (niter > tao->max_it) {
1856: PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);
1857: reason = TAO_DIVERGED_MAXITS;
1858: } else {
1859: reason = TAO_CONTINUE_ITERATING;
1860: }
1861: tao->reason = reason;
1862: return(0);
1863: }
1865: /*@C
1866: TaoSetOptionsPrefix - Sets the prefix used for searching for all
1867: TAO options in the database.
1870: Logically Collective on Tao
1872: Input Parameters:
1873: + tao - the Tao context
1874: - prefix - the prefix string to prepend to all TAO option requests
1876: Notes:
1877: A hyphen (-) must NOT be given at the beginning of the prefix name.
1878: The first character of all runtime options is AUTOMATICALLY the hyphen.
1880: For example, to distinguish between the runtime options for two
1881: different TAO solvers, one could call
1882: .vb
1883: TaoSetOptionsPrefix(tao1,"sys1_")
1884: TaoSetOptionsPrefix(tao2,"sys2_")
1885: .ve
1887: This would enable use of different options for each system, such as
1888: .vb
1889: -sys1_tao_method blmvm -sys1_tao_gtol 1.e-3
1890: -sys2_tao_method lmvm -sys2_tao_gtol 1.e-4
1891: .ve
1894: Level: advanced
1896: .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix()
1897: @*/
1899: PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[])
1900: {
1904: PetscObjectSetOptionsPrefix((PetscObject)tao,p);
1905: if (tao->linesearch) {
1906: TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
1907: }
1908: if (tao->ksp) {
1909: KSPSetOptionsPrefix(tao->ksp,p);
1910: }
1911: return(0);
1912: }
1914: /*@C
1915: TaoAppendOptionsPrefix - Appends to the prefix used for searching for all
1916: TAO options in the database.
1919: Logically Collective on Tao
1921: Input Parameters:
1922: + tao - the Tao solver context
1923: - prefix - the prefix string to prepend to all TAO option requests
1925: Notes:
1926: A hyphen (-) must NOT be given at the beginning of the prefix name.
1927: The first character of all runtime options is AUTOMATICALLY the hyphen.
1930: Level: advanced
1932: .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix()
1933: @*/
1934: PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[])
1935: {
1939: PetscObjectAppendOptionsPrefix((PetscObject)tao,p);
1940: if (tao->linesearch) {
1941: TaoLineSearchSetOptionsPrefix(tao->linesearch,p);
1942: }
1943: if (tao->ksp) {
1944: KSPSetOptionsPrefix(tao->ksp,p);
1945: }
1946: return(0);
1947: }
1949: /*@C
1950: TaoGetOptionsPrefix - Gets the prefix used for searching for all
1951: TAO options in the database
1953: Not Collective
1955: Input Parameters:
1956: . tao - the Tao context
1958: Output Parameters:
1959: . prefix - pointer to the prefix string currently in use
1961: Notes: On the Fortran side, the user should pass in a string 'prefix' of
1962: sufficient length to hold the prefix.
1964: Level: advanced
1966: .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix()
1967: @*/
1968: PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[])
1969: {
1970: return PetscObjectGetOptionsPrefix((PetscObject)tao,p);
1971: }
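/*
   A small fragment (assuming an already created Tao object named tao) combining the three
   prefix routines; after the first two calls, options are matched with the combined prefix
   "outer_inner_", e.g. -outer_inner_tao_gatol 1.e-6.
.vb
   const char *pre;

   TaoSetOptionsPrefix(tao,"outer_");
   TaoAppendOptionsPrefix(tao,"inner_");
   TaoGetOptionsPrefix(tao,&pre);
   TaoSetFromOptions(tao);
.ve
*/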
1973: /*@C
1974: TaoSetType - Sets the method for the unconstrained minimization solver.
1976: Collective on Tao
1978: Input Parameters:
1979: + tao - the Tao solver context
1980: - type - a known method
1982: Options Database Key:
1983: . -tao_type <type> - Sets the method; use -help for a list
1984: of available methods (for instance, "-tao_type lmvm" or "-tao_type tron")
1986: Available methods include:
1987: + nls - Newton's method with line search for unconstrained minimization
1988: . ntr - Newton's method with trust region for unconstrained minimization
1989: . ntl - Newton's method with trust region, line search for unconstrained minimization
1990: . lmvm - Limited memory variable metric method for unconstrained minimization
1991: . cg - Nonlinear conjugate gradient method for unconstrained minimization
1992: . nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
1993: . tron - Newton Trust Region method for bound constrained minimization
1994: . gpcg - Newton Trust Region method for quadratic bound constrained minimization
1995: . blmvm - Limited memory variable metric method for bound constrained minimization
1996: - pounders - Model-based algorithm for derivative-free nonlinear least squares
1998: Level: intermediate
2000: .seealso: TaoCreate(), TaoGetType(), TaoType
2002: @*/
2003: PetscErrorCode TaoSetType(Tao tao, const TaoType type)
2004: {
2006: PetscErrorCode (*create_xxx)(Tao);
2007: PetscBool issame;
2012: PetscObjectTypeCompare((PetscObject)tao,type,&issame);
2013: if (issame) return(0);
2015: PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);
2016: if (!create_xxx) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type);
2018: /* Destroy the existing solver information */
2019: if (tao->ops->destroy) {
2020: (*tao->ops->destroy)(tao);
2021: }
2022: KSPDestroy(&tao->ksp);
2023: TaoLineSearchDestroy(&tao->linesearch);
2024: VecDestroy(&tao->gradient);
2025: VecDestroy(&tao->stepdirection);
2027: tao->ops->setup = 0;
2028: tao->ops->solve = 0;
2029: tao->ops->view = 0;
2030: tao->ops->setfromoptions = 0;
2031: tao->ops->destroy = 0;
2033: tao->setupcalled = PETSC_FALSE;
2035: (*create_xxx)(tao);
2036: PetscObjectChangeTypeName((PetscObject)tao,type);
2037: return(0);
2038: }
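/*
   Typical usage sketch: select a default algorithm in the code and let the options database
   override it with -tao_type.  The objective/gradient callbacks and the initial solution
   vector required before TaoSolve() are omitted here.
.vb
   Tao tao;

   TaoCreate(PETSC_COMM_WORLD,&tao);
   TaoSetType(tao,"lmvm");
   TaoSetFromOptions(tao);
   TaoSolve(tao);
   TaoDestroy(&tao);
.ve
*/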
2040: /*MC
2041: TaoRegister - Adds a method to the TAO package for unconstrained minimization.
2043: Synopsis:
2044: TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2046: Not collective
2048: Input Parameters:
2049: + sname - name of a new user-defined solver
2050: - func - routine to Create method context
2052: Notes:
2053: TaoRegister() may be called multiple times to add several user-defined solvers.
2055: Sample usage:
2056: .vb
2057: TaoRegister("my_solver",MySolverCreate);
2058: .ve
2060: Then, your solver can be chosen with the procedural interface via
2061: $ TaoSetType(tao,"my_solver")
2062: or at runtime via the option
2063: $ -tao_type my_solver
2065: Level: advanced
2067: .seealso: TaoRegisterAll(), TaoRegisterDestroy()
2068: M*/
2069: PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2070: {
2074: PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);
2075: return(0);
2076: }
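/*
   Sketch of a registration paired with a skeleton creation routine; MySolverCreate and
   MySolverSolve are hypothetical, the solve routine is a do-nothing placeholder, and
   filling in tao->ops requires the private header.
.vb
   #include <petsc/private/taoimpl.h>

   static PetscErrorCode MySolverSolve(Tao tao)
   {
     PetscReal f;

     TaoComputeObjective(tao,tao->solution,&f);
     TaoMonitor(tao,0,f,0.0,0.0,1.0);
     TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
     return 0;
   }

   static PetscErrorCode MySolverCreate(Tao tao)
   {
     tao->ops->solve = MySolverSolve;
     return 0;
   }

   TaoRegister("my_solver",MySolverCreate);
   TaoSetType(tao,"my_solver");
.ve
*/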
2078: /*@C
2079: TaoRegisterDestroy - Frees the list of minimization solvers that were
2080: registered by TaoRegister().
2082: Not Collective
2084: Level: advanced
2086: .seealso: TaoRegisterAll(), TaoRegister()
2087: @*/
2088: PetscErrorCode TaoRegisterDestroy(void)
2089: {
2092: PetscFunctionListDestroy(&TaoList);
2093: TaoRegisterAllCalled = PETSC_FALSE;
2094: return(0);
2095: }
2097: /*@
2098: TaoGetIterationNumber - Gets the number of Tao iterations completed
2099: at this time.
2101: Not Collective
2103: Input Parameter:
2104: . tao - Tao context
2106: Output Parameter:
2107: . iter - iteration number
2109: Notes:
2110: For example, during the computation of iteration 2 this would return 1.
2113: Level: intermediate
2115: .keywords: Tao, nonlinear, get, iteration, number,
2117: .seealso: TaoGetLinearSolveIterations(), TaoGetResidualNorm(), TaoGetObjective()
2118: @*/
2119: PetscErrorCode TaoGetIterationNumber(Tao tao,PetscInt *iter)
2120: {
2124: *iter = tao->niter;
2125: return(0);
2126: }
2128: /*@
2129: TaoGetObjective - Gets the current value of the objective function
2130: at this time.
2132: Not Collective
2134: Input Parameter:
2135: . tao - Tao context
2137: Output Parameter:
2138: . value - the current value
2140: Level: intermediate
2142: .keywords: Tao, nonlinear, get, iteration, number,
2144: .seealso: TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetResidualNorm()
2145: @*/
2146: PetscErrorCode TaoGetObjective(Tao tao,PetscReal *value)
2147: {
2151: *value = tao->fc;
2152: return(0);
2153: }
2155: /*@
2156: TaoGetResidualNorm - Gets the current value of the norm of the residual
2157: at this time.
2159: Not Collective
2161: Input Parameter:
2162: . tao - Tao context
2164: Output Parameter:
2165: . value - the current value
2167: Level: intermediate
2169: Developer Note: This is the 2-norm of the residual; we cannot use TaoGetGradientNorm() because that has
2170: a different meaning. For some reason Tao sometimes calls the gradient the residual.
2172: .keywords: Tao, nonlinear, get, iteration, number,
2174: .seealso: TaoGetLinearSolveIterations(), TaoGetIterationNumber(), TaoGetObjective()
2175: @*/
2176: PetscErrorCode TaoGetResidualNorm(Tao tao,PetscReal *value)
2177: {
2181: *value = tao->residual;
2182: return(0);
2183: }
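/*
   Usage fragment (assumes a configured Tao object tao): after TaoSolve() the three getters
   above report the last recorded progress.
.vb
   PetscInt  its;
   PetscReal f,gnorm;

   TaoSolve(tao);
   TaoGetIterationNumber(tao,&its);
   TaoGetObjective(tao,&f);
   TaoGetResidualNorm(tao,&gnorm);
   PetscPrintf(PETSC_COMM_WORLD,"its=%D f=%g ||g||=%g\n",its,(double)f,(double)gnorm);
.ve
*/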
2185: /*@
2186: TaoSetIterationNumber - Sets the current iteration number.
2188: Not Collective
2190: Input Parameters:
2191: + tao - Tao context
2192: - iter - iteration number
2194: Level: developer
2196: .keywords: Tao, nonlinear, set, iteration, number,
2198: .seealso: TaoGetLinearSolveIterations()
2199: @*/
2200: PetscErrorCode TaoSetIterationNumber(Tao tao,PetscInt iter)
2201: {
2206: PetscObjectSAWsTakeAccess((PetscObject)tao);
2207: tao->niter = iter;
2208: PetscObjectSAWsGrantAccess((PetscObject)tao);
2209: return(0);
2210: }
2212: /*@
2213: TaoGetTotalIterationNumber - Gets the total number of Tao iterations
2214: completed. This number keeps accumulating if multiple solves
2215: are called with the Tao object.
2217: Not Collective
2219: Input Parameter:
2220: . tao - Tao context
2222: Output Parameter:
2223: . iter - iteration number
2225: Notes:
2226: The total iteration count is updated after each solve, if there is a current
2227: TaoSolve() in progress then those iterations are not yet counted.
2229: Level: intermediate
2231: .keywords: Tao, nonlinear, get, iteration, number,
2233: .seealso: TaoGetLinearSolveIterations()
2234: @*/
2235: PetscErrorCode TaoGetTotalIterationNumber(Tao tao,PetscInt *iter)
2236: {
2240: *iter = tao->ntotalits;
2241: return(0);
2242: }
2244: /*@
2245: TaoSetTotalIterationNumber - Sets the current total iteration number.
2247: Not Collective
2249: Input Parameters:
2250: + tao - Tao context
2251: - iter - iteration number
2253: Level: developer
2255: .keywords: Tao, nonlinear, set, iteration, number,
2257: .seealso: TaoGetLinearSolveIterations()
2258: @*/
2259: PetscErrorCode TaoSetTotalIterationNumber(Tao tao,PetscInt iter)
2260: {
2265: PetscObjectSAWsTakeAccess((PetscObject)tao);
2266: tao->ntotalits = iter;
2267: PetscObjectSAWsGrantAccess((PetscObject)tao);
2268: return(0);
2269: }
2271: /*@
2272: TaoSetConvergedReason - Sets the termination flag on a Tao object
2274: Logically Collective on Tao
2276: Input Parameters:
2277: + tao - the Tao context
2278: - reason - one of
2279: $ TAO_CONVERGED_GATOL (3),
2280: $ TAO_CONVERGED_GRTOL (4),
2281: $ TAO_CONVERGED_GTTOL (5),
2282: $ TAO_CONVERGED_STEPTOL (6),
2283: $ TAO_CONVERGED_MINF (7),
2284: $ TAO_CONVERGED_USER (8),
2285: $ TAO_DIVERGED_MAXITS (-2),
2286: $ TAO_DIVERGED_NAN (-4),
2287: $ TAO_DIVERGED_MAXFCN (-5),
2288: $ TAO_DIVERGED_LS_FAILURE (-6),
2289: $ TAO_DIVERGED_TR_REDUCTION (-7),
2290: $ TAO_DIVERGED_USER (-8),
2291: $ TAO_CONTINUE_ITERATING (0)
2292: Level: intermediate
2294: @*/
2295: PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason)
2296: {
2299: tao->reason = reason;
2300: return(0);
2301: }
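/*
   A common use, sketched with the hypothetical name MyMonitor: a monitor registered with
   TaoSetMonitor() can request termination by setting a user reason; the default convergence
   test at the top of this file leaves a non-continue reason untouched.
.vb
   static PetscErrorCode MyMonitor(Tao tao, void *ctx)
   {
     PetscInt its;

     TaoGetIterationNumber(tao,&its);
     if (its >= 25) TaoSetConvergedReason(tao,TAO_CONVERGED_USER);
     return 0;
   }

   TaoSetMonitor(tao,MyMonitor,NULL,NULL);
.ve
*/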
2303: /*@
2304: TaoGetConvergedReason - Gets the reason the Tao iteration was stopped.
2306: Not Collective
2308: Input Parameter:
2309: . tao - the Tao solver context
2311: Output Parameter:
2312: . reason - one of
2313: $ TAO_CONVERGED_GATOL (3) ||g(X)|| < gatol
2314: $ TAO_CONVERGED_GRTOL (4) ||g(X)|| / f(X) < grtol
2315: $ TAO_CONVERGED_GTTOL (5) ||g(X)|| / ||g(X0)|| < gttol
2316: $ TAO_CONVERGED_STEPTOL (6) step size small
2317: $ TAO_CONVERGED_MINF (7) F < F_min
2318: $ TAO_CONVERGED_USER (8) User defined
2319: $ TAO_DIVERGED_MAXITS (-2) its > maxits
2320: $ TAO_DIVERGED_NAN (-4) Numerical problems
2321: $ TAO_DIVERGED_MAXFCN (-5) fevals > max_funcs
2322: $ TAO_DIVERGED_LS_FAILURE (-6) line search failure
2323: $ TAO_DIVERGED_TR_REDUCTION (-7) trust region failure
2324: $ TAO_DIVERGED_USER (-8) User defined
2325: $ TAO_CONTINUE_ITERATING (0)
2327: where
2328: + X - current solution
2329: . X0 - initial guess
2330: . f(X) - current function value
2331: . f(X*) - true solution (estimated)
2332: . g(X) - current gradient
2333: . its - current iterate number
2334: . maxits - maximum number of iterates
2335: . fevals - number of function evaluations
2336: - max_funcs - maximum number of function evaluations
2338: Level: intermediate
2340: .seealso: TaoSetConvergenceTest(), TaoSetTolerances()
2342: @*/
2343: PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason)
2344: {
2348: *reason = tao->reason;
2349: return(0);
2350: }
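/*
   Fragment showing the usual post-solve check; as in the table above, positive reasons
   indicate convergence and negative reasons indicate divergence.
.vb
   TaoConvergedReason reason;

   TaoSolve(tao);
   TaoGetConvergedReason(tao,&reason);
   if (reason <= 0) PetscPrintf(PETSC_COMM_WORLD,"Tao did not converge (reason %d)\n",(int)reason);
.ve
*/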
2352: /*@
2353: TaoGetSolutionStatus - Get the current iterate, objective value,
2354: residual, infeasibility, and termination
2356: Not Collective
2358: Input Parameter:
2359: . tao - the Tao context
2361: Output Parameters:
2362: + its - the current iterate number (>=0)
2363: . f - the current function value
2364: . gnorm - the square of the gradient norm, duality gap, or other measure indicating distance from optimality.
2365: . cnorm - the infeasibility of the current solution with regard to the constraints.
2366: . xdiff - the step length or trust region radius of the most recent iterate.
2367: - reason - The termination reason, which can equal TAO_CONTINUE_ITERATING
2369: Level: intermediate
2371: Notes:
2372: TAO returns the values set by the solvers in the routine TaoMonitor().
2375: If any of the output arguments are set to NULL, no corresponding value will be returned.
2377: .seealso: TaoMonitor(), TaoGetConvergedReason()
2378: @*/
2379: PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason)
2380: {
2382: if (its) *its=tao->niter;
2383: if (f) *f=tao->fc;
2384: if (gnorm) *gnorm=tao->residual;
2385: if (cnorm) *cnorm=tao->cnorm;
2386: if (reason) *reason=tao->reason;
2387: if (xdiff) *xdiff=tao->step;
2388: return(0);
2389: }
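/*
   Usage sketch: pass NULL for any field that is not needed, as noted above.
.vb
   PetscInt           its;
   PetscReal          f,gnorm;
   TaoConvergedReason reason;

   TaoGetSolutionStatus(tao,&its,&f,&gnorm,NULL,NULL,&reason);
.ve
*/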
2391: /*@C
2392: TaoGetType - Gets the current Tao algorithm.
2394: Not Collective
2396: Input Parameter:
2397: . tao - the Tao solver context
2399: Output Parameter:
2400: . type - Tao method
2402: Level: intermediate
2404: @*/
2405: PetscErrorCode TaoGetType(Tao tao, const TaoType *type)
2406: {
2410: *type=((PetscObject)tao)->type_name;
2411: return(0);
2412: }
2414: /*@C
2415: TaoMonitor - Monitor the solver and the current solution.  This
2416: routine will record the iteration number and residual statistics,
2417: and call any monitors specified by the user.
2419: Input Parameters:
2420: + tao - the Tao context
2421: . its - the current iterate number (>=0)
2422: . f - the current objective function value
2423: . res - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality. This measure will be recorded and
2424: used for some termination tests.
2425: . cnorm - the infeasibility of the current solution with regard to the constraints.
2426: - steplength - multiple of the step direction added to the previous iterate.
2428: Notes:
2429: The termination reason is not set here; it is determined by the convergence test (e.g. TaoDefaultConvergenceTest()) and can be queried with TaoGetConvergedReason().
2431: Options Database Key:
2432: . -tao_monitor - Use the default monitor, which prints statistics to standard output
2434: .seealso: TaoGetConvergedReason(), TaoMonitorDefault(), TaoSetMonitor()
2436: Level: developer
2438: @*/
2439: PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength)
2440: {
2442: PetscInt i;
2446: tao->fc = f;
2447: tao->residual = res;
2448: tao->cnorm = cnorm;
2449: tao->step = steplength;
2450: if (!its) {
2451: tao->cnorm0 = cnorm; tao->gnorm0 = res;
2452: }
2453: if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
2454: for (i=0;i<tao->numbermonitors;i++) {
2455: (*tao->monitor[i])(tao,tao->monitorcontext[i]);
2456: }
2457: return(0);
2458: }
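/*
   This routine is meant to be called from inside a solver implementation.  The skeleton
   below is a sketch only: it assumes the private header petsc/private/taoimpl.h, assumes
   the convergence-test context is stored in tao->cnvP, and omits the actual step
   computation that would update fk, gnormk and steplen.
.vb
   PetscReal fk = 0.0, gnormk = 0.0, steplen = 1.0;

   while (tao->reason == TAO_CONTINUE_ITERATING) {
     // compute the next iterate here and update fk, gnormk, steplen
     TaoMonitor(tao,tao->niter,fk,gnormk,0.0,steplen);
     (*tao->ops->convergencetest)(tao,tao->cnvP);
     tao->niter++;
   }
.ve
*/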
2460: /*@
2461: TaoSetConvergenceHistory - Sets the array used to hold the convergence history.
2463: Logically Collective on Tao
2465: Input Parameters:
2466: + tao - the Tao solver context
2467: . obj - array to hold objective value history
2468: . resid - array to hold residual history
2469: . cnorm - array to hold constraint violation history
2470: . lits - integer array holds the number of linear iterations for each Tao iteration
2471: . na - size of obj, resid, cnorm, and lits
2472: - reset - PETSC_TRUE indicates each new minimization resets the history counter to zero,
2473: else it continues storing new values for new minimizations after the old ones
2475: Notes:
2476: If set, TAO will fill the given arrays with the indicated
2477: information at each iteration. If 'obj','resid','cnorm','lits' are
2478: *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or
2479: PETSC_DEFAULT) is allocated for the history.
2480: If not all are NULL, then only the non-NULL information categories
2481: will be stored, the others will be ignored.
2483: Any convergence information after iteration number 'na' will not be stored.
2485: This routine is useful, e.g., when running a code for purposes
2486: of accurate performance monitoring, when no I/O should be done
2487: during the section of code that is being timed.
2489: Level: intermediate
2491: .seealso: TaoGetConvergenceHistory()
2493: @*/
2494: PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal obj[], PetscReal resid[], PetscReal cnorm[], PetscInt lits[], PetscInt na,PetscBool reset)
2495: {
2505: if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000;
2506: if (!obj && !resid && !cnorm && !lits) {
2507: PetscCalloc1(na,&obj);
2508: PetscCalloc1(na,&resid);
2509: PetscCalloc1(na,&cnorm);
2510: PetscCalloc1(na,&lits);
2511: tao->hist_malloc=PETSC_TRUE;
2512: }
2514: tao->hist_obj = obj;
2515: tao->hist_resid = resid;
2516: tao->hist_cnorm = cnorm;
2517: tao->hist_lits = lits;
2518: tao->hist_max = na;
2519: tao->hist_reset = reset;
2520: tao->hist_len = 0;
2521: return(0);
2522: }
2524: /*@C
2525: TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history.
2527: Collective on Tao
2529: Input Parameter:
2530: . tao - the Tao context
2532: Output Parameters:
2533: + obj - array used to hold objective value history
2534: . resid - array used to hold residual history
2535: . cnorm - array used to hold constraint violation history
2536: . lits - integer array used to hold linear solver iteration count
2537: - nhist - size of obj, resid, cnorm, and lits (will be less than or equal to na given in TaoSetConvergenceHistory())
2539: Notes:
2540: This routine must be preceded by calls to TaoSetConvergenceHistory()
2541: and TaoSolve(), otherwise it returns useless information.
2543: The calling sequence for this routine in Fortran is
2544: $ call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr)
2546: This routine is useful, e.g., when running a code for purposes
2547: of accurate performance monitoring, when no I/O should be done
2548: during the section of code that is being timed.
2550: Level: advanced
2552: .seealso: TaoSetConvergenceHistory()
2554: @*/
2555: PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist)
2556: {
2559: if (obj) *obj = tao->hist_obj;
2560: if (cnorm) *cnorm = tao->hist_cnorm;
2561: if (resid) *resid = tao->hist_resid;
2562: if (nhist) *nhist = tao->hist_len;
2563: return(0);
2564: }
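/*
   Typical pairing of the two history routines (a sketch; the arrays are user-owned here and
   sized for at most 100 iterations).
.vb
   PetscReal hist[100],resid[100],cnorm[100];
   PetscInt  lits[100],nhist,i;

   TaoSetConvergenceHistory(tao,hist,resid,cnorm,lits,100,PETSC_TRUE);
   TaoSolve(tao);
   TaoGetConvergenceHistory(tao,NULL,NULL,NULL,NULL,&nhist);
   for (i=0;i<nhist;i++) PetscPrintf(PETSC_COMM_WORLD,"%D: f=%g ||g||=%g\n",i,(double)hist[i],(double)resid[i]);
.ve
*/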
2566: /*@
2567: TaoSetApplicationContext - Sets the optional user-defined context for
2568: a solver.
2570: Logically Collective on Tao
2572: Input Parameters:
2573: + tao - the Tao context
2574: - usrP - optional user context
2576: Level: intermediate
2578: .seealso: TaoGetApplicationContext()
2579: @*/
2580: PetscErrorCode TaoSetApplicationContext(Tao tao,void *usrP)
2581: {
2584: tao->user = usrP;
2585: return(0);
2586: }
2588: /*@
2589: TaoGetApplicationContext - Gets the user-defined context for a
2590: TAO solver.
2592: Not Collective
2594: Input Parameter:
2595: . tao - Tao context
2597: Output Parameter:
2598: . usrP - user context
2600: Level: intermediate
2602: .seealso: TaoSetApplicationContext()
2603: @*/
2604: PetscErrorCode TaoGetApplicationContext(Tao tao,void *usrP)
2605: {
2608: *(void**)usrP = tao->user;
2609: return(0);
2610: }
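/*
   Common pattern, sketched with the hypothetical names AppCtx and FormObjective: stash
   problem data on the Tao object and recover it inside a callback instead of using global
   variables.
.vb
   typedef struct { PetscReal alpha; } AppCtx;

   static PetscErrorCode FormObjective(Tao tao, Vec X, PetscReal *f, void *ptr)
   {
     AppCtx    *user;
     PetscReal  nrm;

     TaoGetApplicationContext(tao,&user);
     VecNorm(X,NORM_2,&nrm);
     *f = user->alpha*nrm*nrm;
     return 0;
   }

   AppCtx user = {0.5};
   TaoSetApplicationContext(tao,&user);
   TaoSetObjectiveRoutine(tao,FormObjective,&user);
.ve
*/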
2612: /*@
2613: TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient.
2615: Collective on tao
2617: Input Parameters:
2618: + tao - the Tao context
2619: - M - gradient norm
2621: Level: beginner
2623: .seealso: TaoGetGradientNorm(), TaoGradientNorm()
2624: @*/
2625: PetscErrorCode TaoSetGradientNorm(Tao tao, Mat M)
2626: {
2632: if (tao->gradient_norm) {
2633: PetscObjectDereference((PetscObject)tao->gradient_norm);
2634: VecDestroy(&tao->gradient_norm_tmp);
2635: }
2637: PetscObjectReference((PetscObject)M);
2638: tao->gradient_norm = M;
2639: MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);
2640: return(0);
2641: }
2643: /*@
2644: TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient.
2646: Not Collective
2648: Input Parameter:
2649: . tao - Tao context
2651: Output Parameter:
2652: . M - gradient norm
2654: Level: beginner
2656: .seealso: TaoSetGradientNorm(), TaoGradientNorm()
2657: @*/
2658: PetscErrorCode TaoGetGradientNorm(Tao tao, Mat *M)
2659: {
2662: *M = tao->gradient_norm;
2663: return(0);
2664: }
2666: /*@
2667: TaoGradientNorm - Compute the norm with respect to the inner product the user has set.
2669: Collective on tao
2671: Input Parameters:
2672: + tao - the Tao context
2673: . gradient - the gradient vector whose norm is to be computed
2674: - type - the norm type
2676: Output Parameter:
2677: . gnorm - the gradient norm
2679: Level: developer
2681: .seealso: TaoSetGradientNorm(), TaoGetGradientNorm()
2682: @*/
2683: PetscErrorCode TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
2684: {
2690: if (tao->gradient_norm) {
2691: PetscScalar gnorms;
2693: if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONGSTATE, "Norm type must be NORM_2 if an inner product for the gradient norm is set.");
2694: MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);
2695: VecDot(gradient, tao->gradient_norm_tmp, &gnorms);
2696: *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
2697: } else {
2698: VecNorm(gradient, type, gnorm);
2699: }
2700: return(0);
2701: }
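/*
   Sketch of the intended use: M is a user-assembled symmetric positive definite matrix
   (e.g. a mass matrix) and g an assembled gradient vector; after TaoSetGradientNorm() the
   routine above reports sqrt(g^T M g) instead of the Euclidean norm.
.vb
   PetscReal gnorm;

   TaoSetGradientNorm(tao,M);
   TaoGradientNorm(tao,g,NORM_2,&gnorm);
.ve
*/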
2703: /*@C
2704: TaoMonitorDrawCtxCreate - Creates the monitor context for TaoMonitorDrawSolution()
2706: Collective on Tao
2708: Output Parameter:
2709: . ctx - the monitor context
2711: Options Database:
2712: . -tao_draw_solution_initial - show initial guess as well as current solution
2714: Level: intermediate
2716: .keywords: Tao, vector, monitor, view
2718: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoMonitorDrawCtx()
2719: @*/
2720: PetscErrorCode TaoMonitorDrawCtxCreate(MPI_Comm comm,const char host[],const char label[],int x,int y,int m,int n,PetscInt howoften,TaoMonitorDrawCtx *ctx)
2721: {
2722: PetscErrorCode ierr;
2725: PetscNew(ctx);
2726: PetscViewerDrawOpen(comm,host,label,x,y,m,n,&(*ctx)->viewer);
2727: PetscViewerSetFromOptions((*ctx)->viewer);
2728: (*ctx)->howoften = howoften;
2729: return(0);
2730: }
2732: /*@C
2733: TaoMonitorDrawCtxDestroy - Destroys the monitor context for TaoMonitorDrawSolution()
2735: Collective on Tao
2737: Input Parameters:
2738: . ctx - the monitor context
2740: Level: intermediate
2742: .keywords: Tao, vector, monitor, view
2744: .seealso: TaoSetMonitor(), TaoMonitorDefault(), VecView(), TaoMonitorDrawSolution()
2745: @*/
2746: PetscErrorCode TaoMonitorDrawCtxDestroy(TaoMonitorDrawCtx *ictx)
2747: {
2751: PetscViewerDestroy(&(*ictx)->viewer);
2752: PetscFree(*ictx);
2753: return(0);
2754: }