Actual source code: taosolver_fg.c
petsc-3.11.4 2019-09-28
#include <petsc/private/taoimpl.h>
/*@
  TaoSetInitialVector - Sets the initial guess for the solve

  Logically collective on Tao

  Input Parameters:
+ tao - the Tao context
- x0  - the initial guess

  Level: beginner

.seealso: TaoCreate(), TaoSolve()
@*/
PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0)
{
  if (x0) {
    PetscObjectReference((PetscObject)x0);
  }
  VecDestroy(&tao->solution);
  tao->solution = x0;
  return(0);
}
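/* Usage sketch (editor's addition, not part of the original file): a typical driver
   creates the solution vector, sets it as the initial guess, and then solves.
   The vector name "x" and size "n" are hypothetical.

     Vec x;
     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,n,&x);
     VecSet(x,0.0);
     TaoSetInitialVector(tao,x);
     TaoSolve(tao);
*/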
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);
  PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);
  PetscOptionsEnd();
  if (!test) return(0);

  PetscObjectGetComm((PetscObject)tao,&comm);
  PetscViewerASCIIGetStdout(comm,&viewer);
  PetscViewerASCIIGetTab(viewer, &tabs);
  PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
  PetscViewerASCIIPrintf(viewer," ---------- Testing Gradient -------------\n");
  if (!complete_print && !directionsprinted) {
    PetscViewerASCIIPrintf(viewer," Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");
    PetscViewerASCIIPrintf(viewer," of hand-coded and finite difference gradient entries greater than <threshold>.\n");
  }
  if (!directionsprinted) {
    PetscViewerASCIIPrintf(viewer," Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||_F/||G||_F is\n");
    PetscViewerASCIIPrintf(viewer," O(1.e-8), the hand-coded Gradient is probably correct.\n");
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    PetscViewerPushFormat(mviewer,format);
  }

  VecDuplicate(x,&g2);
  VecDuplicate(x,&g3);

  /* Compute the finite difference gradient; the hand-coded gradient is assumed to
     have already been computed by TaoComputeGradient() and stored in g1 */
  TaoDefaultComputeGradient(tao,x,g2,NULL);

  VecNorm(g2,NORM_2,&fdnorm);
  VecNorm(g1,NORM_2,&hcnorm);
  VecNorm(g2,NORM_INFINITY,&fdmax);
  VecNorm(g1,NORM_INFINITY,&hcmax);
  VecDot(g1,g2,&dot);
  VecCopy(g1,g3);
  VecAXPY(g3,-1.0,g2);
  VecNorm(g3,NORM_2,&diffnorm);
  VecNorm(g3,NORM_INFINITY,&diffmax);
  PetscViewerASCIIPrintf(viewer," ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm)));
  PetscViewerASCIIPrintf(viewer," 2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);
  PetscViewerASCIIPrintf(viewer," max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);

  if (complete_print) {
    PetscViewerASCIIPrintf(viewer," Hand-coded gradient ----------\n");
    VecView(g1,mviewer);
    PetscViewerASCIIPrintf(viewer," Finite difference gradient ----------\n");
    VecView(g2,mviewer);
    PetscViewerASCIIPrintf(viewer," Hand-coded minus finite-difference gradient ----------\n");
    VecView(g3,mviewer);
  }
  VecDestroy(&g2);
  VecDestroy(&g3);

  if (complete_print) {
    PetscViewerPopFormat(mviewer);
  }
  PetscViewerASCIISetTab(viewer,tabs);
  return(0);
}
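/* Usage sketch (editor's addition): TaoTestGradient() is driven by the options
   database rather than called directly, e.g. (with a hypothetical executable
   name "./myprog"):

     ./myprog -tao_test_gradient -tao_test_gradient_view

   This prints the hand-coded versus finite-difference comparison at each
   gradient evaluation, plus the entries of both gradients and their difference.
*/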
/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+ -tao_test_gradient      - compare the user-provided gradient with one computed via finite differences to check for errors
- -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them to help users locate errors in the user-provided gradient

  Notes:
  TaoComputeGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy;

  VecLockReadPush(X);
  if (tao->ops->computegradient) {
    PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);
    PetscStackPush("Tao user gradient evaluation routine");
    (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);
    tao->nfuncgrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
  VecLockReadPop(X);

  TaoTestGradient(tao,X,G);
  return(0);
}
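/* Usage sketch (editor's addition): a solver implementation typically evaluates
   the gradient at the current iterate and checks its norm for convergence;
   the names "x", "g", and "gnorm" are hypothetical.

     PetscReal gnorm;
     TaoComputeGradient(tao,x,g);
     VecNorm(g,NORM_2,&gnorm);
*/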
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameter:
. f - Objective value at X

  Notes:
  TaoComputeObjective() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  VecLockReadPush(X);
  if (tao->ops->computeobjective) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective evaluation routine");
    (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");
    VecDuplicate(X,&temp);
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);
    VecDestroy(&temp);
    tao->nfuncgrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
  PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));
  VecLockReadPop(X);
  return(0);
}
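/* Usage sketch (editor's addition): evaluating the objective alone, e.g. at a
   trial point inside a line search; note from the code above that if only a
   combined objective/gradient routine is set, a work vector is duplicated to
   receive the (discarded) gradient. The names "xtrial" and "ftrial" are
   hypothetical.

     PetscReal ftrial;
     TaoComputeObjective(tao,xtrial,&ftrial);
*/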
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameters:
+ f - Objective value at X
- G - Gradient vector at X

  Notes:
  TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  VecLockReadPush(X);
  if (tao->ops->computeobjectiveandgradient) {
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      TaoComputeObjective(tao,X,f);
      TaoDefaultComputeGradient(tao,X,G,NULL);
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);
      PetscStackPop;
    }
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective evaluation routine");
    (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
    PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);
    PetscStackPush("Tao user gradient evaluation routine");
    (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);
    tao->ngrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));
  VecLockReadPop(X);

  TaoTestGradient(tao,X,G);
  return(0);
}
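/* Usage sketch (editor's addition): gradient-based solvers usually request the
   objective and gradient together, since a combined user routine can share
   intermediate computations between the two; the names "x", "g", and "f" are
   hypothetical.

     PetscReal f;
     TaoComputeObjectiveAndGradient(tao,x,&f,g);
*/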
/*@C
  TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization

  Logically collective on Tao

  Input Parameters:
+ tao  - the Tao context
. func - the objective function
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, PetscReal *f, void *ctx);

+ x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
{
  tao->user_objP = ctx;
  tao->ops->computeobjective = func;
  return(0);
}
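/* Usage sketch (editor's addition): a minimal objective callback matching the
   calling sequence above, here for f(x) = 0.5 ||x||^2; "MyObjective" is a
   hypothetical name.

     static PetscErrorCode MyObjective(Tao tao, Vec x, PetscReal *f, void *ctx)
     {
       PetscErrorCode ierr;
       PetscReal      nrm;

       ierr = VecNorm(x,NORM_2,&nrm);CHKERRQ(ierr);
       *f   = 0.5*nrm*nrm;
       return 0;
     }

   registered with TaoSetObjectiveRoutine(tao,MyObjective,NULL);
*/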
/*@C
  TaoSetResidualRoutine - Sets the residual evaluation routine for least-squares applications

  Logically collective on Tao

  Input Parameters:
+ tao  - the Tao context
. res  - the residual vector
. func - the residual evaluation routine
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, Vec f, void *ctx);

+ x   - input vector
. f   - residual vector
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  PetscObjectReference((PetscObject)res);
  if (tao->ls_res) {
    VecDestroy(&tao->ls_res);
  }
  tao->ls_res = res;
  tao->user_lsresP = ctx;
  tao->ops->computeresidual = func;

  return(0);
}
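/* Usage sketch (editor's addition): for a nonlinear least-squares problem the
   callback fills the residual vector F(x), e.g. model(x) - data. The names
   "MyResidual" and "nres" are hypothetical.

     static PetscErrorCode MyResidual(Tao tao, Vec x, Vec f, void *ctx)
     {
       ... fill f with the residuals at x ...
       return 0;
     }

     Vec res;
     VecCreateSeq(PETSC_COMM_SELF,nres,&res);
     TaoSetResidualRoutine(tao,res,MyResidual,NULL);
*/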
/*@
  TaoSetResidualWeights - Sets weights for the residual values. A vector can be used
  if only diagonal terms are needed; otherwise the weights can be given as a sparse
  weight matrix in coordinate (row, column, value) form. If this function is not
  called, or if the vector and the coordinate arrays are both NULL, the identity
  matrix is used as the weighting.

  Collective on Tao

  Input Parameters:
+ tao     - the Tao context
. sigma_v - vector of weights (diagonal terms only)
. n       - the number of weights (if using off-diagonal terms)
. rows    - index list of rows for the weight matrix
. cols    - index list of columns for the weight matrix
- vals    - array of weights

  Note: Either sigma_v or the coordinate arrays (or both) should be NULL

  Level: intermediate

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscInt i;

  if (sigma_v) {
    PetscObjectReference((PetscObject)sigma_v);
  }
  if (tao->res_weights_v) {
    VecDestroy(&tao->res_weights_v);
  }
  tao->res_weights_v = sigma_v;
  if (vals) {
    if (tao->res_weights_n) {
      PetscFree(tao->res_weights_rows);
      PetscFree(tao->res_weights_cols);
      PetscFree(tao->res_weights_w);
    }
    PetscMalloc1(n,&tao->res_weights_rows);
    PetscMalloc1(n,&tao->res_weights_cols);
    PetscMalloc1(n,&tao->res_weights_w);
    tao->res_weights_n = n;
    for (i=0;i<n;i++) {
      tao->res_weights_rows[i] = rows[i];
      tao->res_weights_cols[i] = cols[i];
      tao->res_weights_w[i]    = vals[i];
    }
  } else {
    tao->res_weights_n    = 0;
    tao->res_weights_rows = NULL;
    tao->res_weights_cols = NULL;
  }
  return(0);
}
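/* Usage sketch (editor's addition): the diagonal (vector) form, giving every
   residual the same weight; "sigma" is a hypothetical vector with one weight per
   residual, conforming to the residual vector "res" sketched above.

     Vec sigma;
     VecDuplicate(res,&sigma);
     VecSet(sigma,2.0);
     TaoSetResidualWeights(tao,sigma,0,NULL,NULL,NULL);
*/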
/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameter:
. F - residual vector at X

  Notes:
  TaoComputeResidual() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  if (tao->ops->computeresidual) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user least-squares residual evaluation routine");
    (*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  PetscInfo(tao,"TAO least-squares residual evaluation.\n");
  return(0);
}
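/* Usage sketch (editor's addition): a least-squares solver evaluates the
   residual at the current iterate and monitors its norm; the names "x", "F",
   and "resnorm" are hypothetical.

     PetscReal resnorm;
     TaoComputeResidual(tao,x,F);
     VecNorm(F,NORM_2,&resnorm);
*/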
/*@C
  TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization

  Logically collective on Tao

  Input Parameters:
+ tao  - the Tao context
. func - the gradient function
- ctx  - [optional] user-defined context for private data for the gradient evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, Vec g, void *ctx);

+ x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  tao->user_gradP = ctx;
  tao->ops->computegradient = func;
  return(0);
}
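/* Usage sketch (editor's addition): a gradient callback consistent with the
   objective f(x) = 0.5 ||x||^2 sketched earlier, for which grad f(x) = x;
   "MyGradient" is a hypothetical name.

     static PetscErrorCode MyGradient(Tao tao, Vec x, Vec g, void *ctx)
     {
       PetscErrorCode ierr;

       ierr = VecCopy(x,g);CHKERRQ(ierr);
       return 0;
     }

   registered with TaoSetGradientRoutine(tao,MyGradient,NULL);
*/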
/*@C
  TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization

  Logically collective on Tao

  Input Parameters:
+ tao  - the Tao context
. func - the objective and gradient evaluation routine
- ctx  - [optional] user-defined context for private data for the evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+ x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine()
@*/
PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
{
  tao->user_objgradP = ctx;
  tao->ops->computeobjectiveandgradient = func;
  return(0);
}
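/* Usage sketch (editor's addition): the combined form lets the objective and
   gradient share work; for f(x) = 0.5 x'x one dot product yields both.
   "MyObjGrad" is a hypothetical name.

     static PetscErrorCode MyObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
     {
       PetscErrorCode ierr;
       PetscScalar    xtx;

       ierr = VecDot(x,x,&xtx);CHKERRQ(ierr);
       *f   = 0.5*PetscRealPart(xtx);
       ierr = VecCopy(x,g);CHKERRQ(ierr);
       return 0;
     }

   registered with TaoSetObjectiveAndGradientRoutine(tao,MyObjGrad,NULL);
*/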
/*@
  TaoIsObjectiveDefined - Checks to see if the user has
  declared an objective-only routine.  Useful for determining when
  it is appropriate to call TaoComputeObjective() or
  TaoComputeObjectiveAndGradient()

  Collective on Tao

  Input Parameter:
. tao - the Tao context

  Output Parameter:
. flg - PETSC_TRUE if objective function routine is set by user,
        PETSC_FALSE otherwise

  Level: developer

.seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjective == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
/*@
  TaoIsGradientDefined - Checks to see if the user has
  declared a gradient-only routine.  Useful for determining when
  it is appropriate to call TaoComputeGradient() or
  TaoComputeObjectiveAndGradient()

  Not Collective

  Input Parameter:
. tao - the Tao context

  Output Parameter:
. flg - PETSC_TRUE if gradient routine is set by user, PETSC_FALSE otherwise

  Level: developer

.seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computegradient == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
/*@
  TaoIsObjectiveAndGradientDefined - Checks to see if the user has
  declared a joint objective/gradient routine.  Useful for determining when
  it is appropriate to call TaoComputeObjective() or
  TaoComputeObjectiveAndGradient()

  Not Collective

  Input Parameter:
. tao - the Tao context

  Output Parameter:
. flg - PETSC_TRUE if objective/gradient routine is set by user, PETSC_FALSE otherwise

  Level: developer

.seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjectiveandgradient == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
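/* Usage sketch (editor's addition): a solver implementation can use these
   queries to choose the cheapest evaluation path; the names "x", "g", and "f"
   are hypothetical.

     PetscBool hasobjgrad;
     TaoIsObjectiveAndGradientDefined(tao,&hasobjgrad);
     if (hasobjgrad) {
       TaoComputeObjectiveAndGradient(tao,x,&f,g);
     } else {
       TaoComputeObjective(tao,x,&f);
       TaoComputeGradient(tao,x,g);
     }
*/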