Actual source code: taosolver_fg.c
petsc-3.13.6 2020-09-29
#include <petsc/private/taoimpl.h>
/*@
   TaoSetInitialVector - Sets the initial guess for the solve

   Logically collective on Tao

   Input Parameters:
+  tao - the Tao context
-  x0  - the initial guess

   Level: beginner

.seealso: TaoCreate(), TaoSolve()
@*/
PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0)
{
  if (x0) {
    PetscObjectReference((PetscObject)x0);
  }
  VecDestroy(&tao->solution);
  tao->solution = x0;
  return(0);
}
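
/* Editor's sketch (not part of taosolver_fg.c): a typical call site for
   TaoSetInitialVector(). The solver keeps its own reference to x0 and uses it
   as the starting point; `tao` and `x0` are assumed to have been created
   elsewhere with TaoCreate() and VecCreate(). */
#if 0
  VecSet(x0,1.0);               /* start the solve from the all-ones vector  */
  TaoSetInitialVector(tao,x0);  /* Tao takes its own reference to x0         */
  TaoSolve(tao);                /* tao->solution holds the result afterwards */
#endif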
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);
  PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);
  PetscOptionsEnd();
  if (!test) {
    if (complete_print) {
      PetscViewerDestroy(&mviewer);
    }
    return(0);
  }

  PetscObjectGetComm((PetscObject)tao,&comm);
  PetscViewerASCIIGetStdout(comm,&viewer);
  PetscViewerASCIIGetTab(viewer,&tabs);
  PetscViewerASCIISetTab(viewer,((PetscObject)tao)->tablevel);
  PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n");
  if (!complete_print && !directionsprinted) {
    PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");
    PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n");
  }
  if (!directionsprinted) {
    PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n");
    PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Gradient is probably correct.\n");
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    PetscViewerPushFormat(mviewer,format);
  }

  VecDuplicate(x,&g2);
  VecDuplicate(x,&g3);

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  TaoDefaultComputeGradient(tao,x,g2,NULL);

  VecNorm(g2,NORM_2,&fdnorm);
  VecNorm(g1,NORM_2,&hcnorm);
  VecNorm(g2,NORM_INFINITY,&fdmax);
  VecNorm(g1,NORM_INFINITY,&hcmax);
  VecDot(g1,g2,&dot);
  VecCopy(g1,g3);
  VecAXPY(g3,-1.0,g2);
  VecNorm(g3,NORM_2,&diffnorm);
  VecNorm(g3,NORM_INFINITY,&diffmax);
  PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n",(double)fdnorm,(double)hcnorm,(double)(PetscRealPart(dot)/(fdnorm*hcnorm)));
  PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);
  PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);

  if (complete_print) {
    PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n");
    VecView(g1,mviewer);
    PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n");
    VecView(g2,mviewer);
    PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n");
    VecView(g3,mviewer);
  }
  VecDestroy(&g2);
  VecDestroy(&g3);

  if (complete_print) {
    PetscViewerPopFormat(mviewer);
    PetscViewerDestroy(&mviewer);
  }
  PetscViewerASCIISetTab(viewer,tabs);
  return(0);
}
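
/* Editor's note (not part of taosolver_fg.c): the test above is driven
   entirely by the options database it reads, so no code changes are needed to
   use it, e.g.

     ./myprog -tao_test_gradient -tao_test_gradient_view

   where `myprog` is a hypothetical application that calls TaoSolve(). The
   first option prints the norm comparison after each gradient evaluation; the
   second additionally views the hand-coded, finite-difference, and difference
   vectors element by element. */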
/*@
   TaoComputeGradient - Computes the gradient of the objective function

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameter:
.  G - gradient vector

   Options Database Keys:
+  -tao_test_gradient      - compare the user-provided gradient with one computed via finite differences to check for errors
-  -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them to help users detect the location of errors in the user-provided gradient

   Notes:
   TaoComputeGradient() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy;

  VecLockReadPush(X);
  if (tao->ops->computegradient) {
    PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);
    PetscStackPush("Tao user gradient evaluation routine");
    (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
  VecLockReadPop(X);

  TaoTestGradient(tao,X,G);
  return(0);
}
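
/* Editor's sketch (not part of taosolver_fg.c): how a caller drives the
   evaluation wrappers in this file. `tao` and `x` are assumed to be set up
   already; each call dispatches to whichever user routines were registered,
   falling back as the bodies above show. */
#if 0
  PetscReal f;
  Vec       g;

  VecDuplicate(x,&g);
  TaoComputeObjective(tao,x,&f);              /* f(x) only                        */
  TaoComputeGradient(tao,x,g);                /* grad f(x) only                   */
  TaoComputeObjectiveAndGradient(tao,x,&f,g); /* both, in one callback if set     */
  VecDestroy(&g);
#endif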
/*@
   TaoComputeObjective - Computes the objective function value at a given point

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameter:
.  f - objective value at X

   Notes:
   TaoComputeObjective() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  VecLockReadPush(X);
  if (tao->ops->computeobjective) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective evaluation routine");
    (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");
    VecDuplicate(X,&temp);
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);
    VecDestroy(&temp);
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
  PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));
  VecLockReadPop(X);
  return(0);
}
/*@
   TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameters:
+  f - objective value at X
-  G - gradient vector at X

   Notes:
   TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeObjective(), TaoComputeGradient(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  VecLockReadPush(X);
  if (tao->ops->computeobjectiveandgradient) {
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* a finite-difference gradient was requested, so evaluate the objective
         and the FD gradient separately rather than calling the user's combined routine */
      TaoComputeObjective(tao,X,f);
      TaoDefaultComputeGradient(tao,X,G,NULL);
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);
      PetscStackPop;
    }
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective evaluation routine");
    (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
    PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);
    PetscStackPush("Tao user gradient evaluation routine");
    (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));
  VecLockReadPop(X);

  TaoTestGradient(tao,X,G);
  return(0);
}
/*@C
   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  func - the objective function
-  ctx  - [optional] user-defined context for private data for the function evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, PetscReal *f, void *ctx);

+  x   - input vector
.  f   - function value
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
{
  tao->user_objP = ctx;
  tao->ops->computeobjective = func;
  return(0);
}
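
/* Editor's sketch (not part of taosolver_fg.c): a user objective callback
   matching the calling sequence above. The quadratic f(x) = 1/2 ||x||^2 is a
   hypothetical example; it would be registered with
   TaoSetObjectiveRoutine(tao,MyObjective,NULL). */
static PetscErrorCode MyObjective(Tao tao, Vec x, PetscReal *f, void *ctx)
{
  PetscReal nrm;

  VecNorm(x,NORM_2,&nrm);  /* ||x||              */
  *f = 0.5*nrm*nrm;        /* f(x) = 1/2 ||x||^2 */
  return(0);
}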
/*@C
   TaoSetResidualRoutine - Sets the residual evaluation routine for least-squares applications

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  res  - the residual vector
.  func - the residual evaluation routine
-  ctx  - [optional] user-defined context for private data for the function evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, Vec f, void *ctx);

+  x   - input vector
.  f   - residual vector
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  PetscObjectReference((PetscObject)res);
  if (tao->ls_res) {
    VecDestroy(&tao->ls_res);
  }
  tao->ls_res = res;
  tao->user_lsresP = ctx;
  tao->ops->computeresidual = func;

  return(0);
}
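
/* Editor's sketch (not part of taosolver_fg.c): a residual callback for a
   hypothetical linear least-squares fit r(x) = A x - b, with the Mat A and
   Vec b carried in a user context; it would be registered with
   TaoSetResidualRoutine(tao,res,MyResidual,&user) and evaluated by
   TaoComputeResidual() below. */
typedef struct { Mat A; Vec b; } MyLSCtx;

static PetscErrorCode MyResidual(Tao tao, Vec x, Vec r, void *ctx)
{
  MyLSCtx *user = (MyLSCtx*)ctx;

  MatMult(user->A,x,r);     /* r = A x     */
  VecAXPY(r,-1.0,user->b);  /* r = A x - b */
  return(0);
}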
/*@
   TaoSetResidualWeights - Gives weights for the residual values. A vector can be used
   if only diagonal terms are needed; otherwise the entries of a weight matrix sigma_w
   can be given as (row, column, value) triples. If this function is not called, or if
   both the vector and the triples are NULL, the identity matrix is used for the weights.

   Collective on Tao

   Input Parameters:
+  tao     - the Tao context
.  sigma_v - vector of weights (diagonal terms only)
.  n       - the number of weights (if using off-diagonal)
.  rows    - index list of rows for sigma_w
.  cols    - index list of columns for sigma_w
-  vals    - array of weights

   Note: Either sigma_v or the (rows, cols, vals) triples (or both) should be NULL

   Level: intermediate

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscInt i;

  if (sigma_v) {
    PetscObjectReference((PetscObject)sigma_v);
  }
  if (tao->res_weights_v) {
    VecDestroy(&tao->res_weights_v);
  }
  tao->res_weights_v = sigma_v;
  if (vals) {
    if (tao->res_weights_n) {
      PetscFree(tao->res_weights_rows);
      PetscFree(tao->res_weights_cols);
      PetscFree(tao->res_weights_w);
    }
    PetscMalloc1(n,&tao->res_weights_rows);
    PetscMalloc1(n,&tao->res_weights_cols);
    PetscMalloc1(n,&tao->res_weights_w);
    tao->res_weights_n = n;
    for (i=0;i<n;i++) {
      tao->res_weights_rows[i] = rows[i];
      tao->res_weights_cols[i] = cols[i];
      tao->res_weights_w[i]    = vals[i];
    }
  } else {
    tao->res_weights_n    = 0;
    tao->res_weights_rows = NULL;
    tao->res_weights_cols = NULL;
  }
  return(0);
}
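
/* Editor's sketch (not part of taosolver_fg.c): the two ways to weight the
   residuals. The Vec `w` and the index/value arrays are hypothetical. */
#if 0
  PetscInt  rows[2] = {0,1}, cols[2] = {1,0};
  PetscReal vals[2] = {0.5,0.5};

  /* diagonal weights: pass a vector, no triples */
  TaoSetResidualWeights(tao,w,0,NULL,NULL,NULL);

  /* off-diagonal weights: pass (row, column, value) triples, no vector */
  TaoSetResidualWeights(tao,NULL,2,rows,cols,vals);
#endif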
/*@
   TaoComputeResidual - Computes a least-squares residual vector at a given point

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameter:
.  F - residual vector at X

   Notes:
   TaoComputeResidual() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  if (tao->ops->computeresidual) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user least-squares residual evaluation routine");
    (*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  PetscInfo(tao,"TAO least-squares residual evaluation.\n");
  return(0);
}
/*@C
   TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  func - the gradient function
-  ctx  - [optional] user-defined context for private data for the gradient evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, Vec g, void *ctx);

+  x   - input vector
.  g   - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  tao->user_gradP = ctx;
  tao->ops->computegradient = func;
  return(0);
}
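
/* Editor's sketch (not part of taosolver_fg.c): the gradient callback paired
   with the hypothetical MyObjective above; for f(x) = 1/2 ||x||^2 the gradient
   is simply x. Registered with TaoSetGradientRoutine(tao,MyGradient,NULL). */
static PetscErrorCode MyGradient(Tao tao, Vec x, Vec g, void *ctx)
{
  VecCopy(x,g);  /* grad f(x) = x */
  return(0);
}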
/*@C
   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  func - the combined objective and gradient function
-  ctx  - [optional] user-defined context for private data for the evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+  x   - input vector
.  f   - objective value (output)
.  g   - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine()
@*/
PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
{
  tao->user_objgradP = ctx;
  tao->ops->computeobjectiveandgradient = func;
  return(0);
}
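
/* Editor's sketch (not part of taosolver_fg.c): a combined callback that
   shares work between the objective and the gradient, which is the usual
   reason to prefer this interface over separate routines. Same hypothetical
   f(x) = 1/2 ||x||^2 as above; registered with
   TaoSetObjectiveAndGradientRoutine(tao,MyObjGrad,NULL). */
static PetscErrorCode MyObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
{
  PetscScalar xtx;

  VecDot(x,x,&xtx);             /* x'x is reused for both outputs */
  *f = 0.5*PetscRealPart(xtx);  /* f(x)      = 1/2 x'x            */
  VecCopy(x,g);                 /* grad f(x) = x                  */
  return(0);
}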
/*@
   TaoIsObjectiveDefined - Checks to see if the user has
   declared an objective-only routine.  Useful for determining when
   it is appropriate to call TaoComputeObjective() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if the objective function routine is set by the user,
         PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
/*@
   TaoIsGradientDefined - Checks to see if the user has
   declared a gradient-only routine.  Useful for determining when
   it is appropriate to call TaoComputeGradient() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if the gradient routine is set by the user, PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
/*@
   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
   declared a joint objective/gradient routine.  Useful for determining when
   it is appropriate to call TaoComputeObjective() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if the objective/gradient routine is set by the user, PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
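
/* Editor's sketch (not part of taosolver_fg.c): how a solver implementation
   might use the three queries above to pick an evaluation path. `tao`, `x`,
   `f`, and `g` are assumed to exist in the surrounding solver code. */
#if 0
  PetscBool hasobj,hasgrad,hasobjgrad;

  TaoIsObjectiveDefined(tao,&hasobj);
  TaoIsGradientDefined(tao,&hasgrad);
  TaoIsObjectiveAndGradientDefined(tao,&hasobjgrad);
  if (hasobjgrad) {
    TaoComputeObjectiveAndGradient(tao,x,&f,g);  /* one combined callback */
  } else if (hasobj && hasgrad) {
    TaoComputeObjective(tao,x,&f);               /* two separate callbacks */
    TaoComputeGradient(tao,x,g);
  }
#endif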