Actual source code: taosolver_fg.c
petsc-3.10.5 2019-03-28
#include <petsc/private/taoimpl.h>
/*@
   TaoSetInitialVector - Sets the initial guess for the solve

   Logically collective on Tao

   Input Parameters:
+  tao - the Tao context
-  x0  - the initial guess

   Level: beginner

.seealso: TaoCreate(), TaoSolve()
@*/
PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0)
{
  if (x0) {
    PetscObjectReference((PetscObject)x0);
  }
  VecDestroy(&tao->solution);
  tao->solution = x0;
  return(0);
}
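/*
   Example (illustrative sketch, not part of this source file): supplying an
   initial guess before TaoSolve(). The size n and the vector name x0 are
   hypothetical user code.

      Vec x0;
      VecCreateSeq(PETSC_COMM_SELF,n,&x0);
      VecSet(x0,1.0);                // hypothetical starting point of all ones
      TaoSetInitialVector(tao,x0);   // Tao takes its own reference to x0
      VecDestroy(&x0);               // safe, since the solver still holds a reference
      TaoSolve(tao);
*/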
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;
  PetscErrorCode    ierr;

  PetscObjectOptionsBegin((PetscObject)tao);
  PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);
  PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);
  PetscOptionsEnd();
  if (!test) return(0);

  PetscObjectGetComm((PetscObject)tao,&comm);
  PetscViewerASCIIGetStdout(comm,&viewer);
  PetscViewerASCIIGetTab(viewer, &tabs);
  PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
  PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n");
  if (!complete_print && !directionsprinted) {
    PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");
    PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n");
  }
  if (!directionsprinted) {
    PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||_F/||G||_F is\n");
    PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded gradient is probably correct.\n");
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    PetscViewerPushFormat(mviewer,format);
  }

  VecDuplicate(x,&g2);
  VecDuplicate(x,&g3);

  /* Compute the finite difference gradient; the hand-coded gradient is assumed to have already been computed by TaoComputeGradient() and placed in g1 */
  TaoDefaultComputeGradient(tao,x,g2,NULL);

  VecNorm(g2,NORM_2,&fdnorm);
  VecNorm(g1,NORM_2,&hcnorm);
  VecNorm(g2,NORM_INFINITY,&fdmax);
  VecNorm(g1,NORM_INFINITY,&hcmax);
  VecDot(g1,g2,&dot);
  VecCopy(g1,g3);
  VecAXPY(g3,-1.0,g2);
  VecNorm(g3,NORM_2,&diffnorm);
  VecNorm(g3,NORM_INFINITY,&diffmax);
  PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm)));
  PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);
  PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);

  if (complete_print) {
    PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n");
    VecView(g1,mviewer);
    PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n");
    VecView(g2,mviewer);
    PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n");
    VecView(g3,mviewer);
  }
  VecDestroy(&g2);
  VecDestroy(&g3);

  if (complete_print) {
    PetscViewerPopFormat(mviewer);
  }
  PetscViewerASCIISetTab(viewer,tabs);
  return(0);
}
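/*
   Example (sketch): turning on the gradient test above without command-line
   arguments, by setting the two options programmatically before the solve.

      PetscOptionsSetValue(NULL,"-tao_test_gradient","");
      PetscOptionsSetValue(NULL,"-tao_test_gradient_view","");
      TaoSolve(tao);   // TaoComputeGradient() now invokes TaoTestGradient(),
                       // which compares against TaoDefaultComputeGradient()
*/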
/*@
   TaoComputeGradient - Computes the gradient of the objective function

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameter:
.  G - gradient vector

   Options Database Keys:
+  -tao_test_gradient      - compare the user-provided gradient with one computed via finite differences to check for errors
-  -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them, to help users locate errors in the user-provided gradient

   Notes:
   TaoComputeGradient() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy;

  VecLockPush(X);
  if (tao->ops->computegradient) {
    PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);
    PetscStackPush("Tao user gradient evaluation routine");
    (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);
    tao->nfuncgrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
  VecLockPop(X);

  TaoTestGradient(tao,X,G);
  return(0);
}
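/*
   Example (sketch, advanced): a custom solver implementation evaluating the
   gradient at its current iterate; x and g are hypothetical solver vectors.

      PetscReal gnorm;
      TaoComputeGradient(tao,x,g);   // dispatches to the user gradient routine,
                                     // or to the combined objective/gradient one
      VecNorm(g,NORM_2,&gnorm);      // e.g. for a first-order convergence test
*/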
/*@
   TaoComputeObjective - Computes the objective function value at a given point

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameter:
.  f - objective value at X

   Notes:
   TaoComputeObjective() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  VecLockPush(X);
  if (tao->ops->computeobjective) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective evaluation routine");
    (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");
    VecDuplicate(X,&temp);
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);
    VecDestroy(&temp);
    tao->nfuncgrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
  PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));
  VecLockPop(X);
  return(0);
}
/*@
   TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameters:
+  f - objective value at X
-  G - gradient vector at X

   Notes:
   TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  VecLockPush(X);
  if (tao->ops->computeobjectiveandgradient) {
    PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      TaoComputeObjective(tao,X,f);
      TaoDefaultComputeGradient(tao,X,G,NULL);
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);
      PetscStackPop;
    }
    PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user objective evaluation routine");
    (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
    PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);
    PetscStackPush("Tao user gradient evaluation routine");
    (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);
    PetscStackPop;
    PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);
    tao->ngrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));
  VecLockPop(X);

  TaoTestGradient(tao,X,G);
  return(0);
}
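/*
   Example (sketch): how a line-search based solver might use this routine to
   obtain f and G at a trial point in a single user callback, avoiding a
   second pass over the problem data; x and g are hypothetical solver vectors.

      PetscReal f;
      TaoComputeObjectiveAndGradient(tao,x,&f,g);
      // f feeds the sufficient-decrease test, g the next search direction
*/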
/*@C
   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  func - the objective function
-  ctx  - [optional] user-defined context for private data for the function evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, PetscReal *f, void *ctx);

+  x   - input vector
.  f   - function value
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
{
  tao->user_objP             = ctx;
  tao->ops->computeobjective = func;
  return(0);
}
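/*
   Example (sketch): a user objective for f(x) = 0.5*||x||^2; the name
   MyObjective is hypothetical.

      PetscErrorCode MyObjective(Tao tao, Vec X, PetscReal *f, void *ctx)
      {
        PetscErrorCode ierr;
        PetscScalar    dot;

        ierr = VecDot(X,X,&dot);CHKERRQ(ierr);
        *f   = 0.5*PetscRealPart(dot);
        return 0;
      }

      TaoSetObjectiveRoutine(tao,MyObjective,NULL);
*/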
/*@C
   TaoSetSeparableObjectiveRoutine - Sets the function evaluation routine for least-squares applications

   Logically collective on Tao

   Input Parameters:
+  tao    - the Tao context
.  sepobj - vector to hold the values of the separable objective at each iterate
.  func   - the objective function evaluation routine
-  ctx    - [optional] user-defined context for private data for the function evaluation
            routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, Vec f, void *ctx);

+  x   - input vector
.  f   - function value vector
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
@*/
PetscErrorCode TaoSetSeparableObjectiveRoutine(Tao tao, Vec sepobj, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  tao->user_sepobjP                   = ctx;
  tao->sep_objective                  = sepobj;
  tao->ops->computeseparableobjective = func;
  return(0);
}
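/*
   Example (sketch): a residual callback for the least-squares solvers; the
   name MyResidual and the two-component residual layout are hypothetical.

      PetscErrorCode MyResidual(Tao tao, Vec X, Vec F, void *ctx)
      {
        PetscErrorCode    ierr;
        const PetscScalar *x;
        PetscScalar       *r;

        ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
        ierr = VecGetArray(F,&r);CHKERRQ(ierr);
        r[0] = x[0] - 1.0;               // r_1(x) = x_1 - 1
        r[1] = 10.0*(x[1] - x[0]*x[0]);  // r_2(x) = 10 (x_2 - x_1^2)
        ierr = VecRestoreArray(F,&r);CHKERRQ(ierr);
        ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
        return 0;
      }

      // F must be created with the number of residuals as its length
      TaoSetSeparableObjectiveRoutine(tao,F,MyResidual,NULL);
*/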
/*@
   TaoSetSeparableObjectiveWeights - Sets weights for the separable objective values. A vector can be used if only
   diagonal terms are needed; otherwise a matrix can be given. If this function is not used, or if sigma_v and
   sigma_w are both NULL, the identity matrix is used as the default weighting.

   Collective on Tao

   Input Parameters:
+  tao     - the Tao context
.  sigma_v - vector of weights (diagonal terms only)
.  n       - the number of weights (if using off-diagonal)
.  rows    - index list of rows for sigma_w
.  cols    - index list of columns for sigma_w
-  vals    - array of weights

   Note: Either sigma_v or sigma_w (or both) should be NULL

   Level: intermediate

.seealso: TaoSetSeparableObjectiveRoutine()
@*/
PetscErrorCode TaoSetSeparableObjectiveWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscInt i;

  VecDestroy(&tao->sep_weights_v);
  tao->sep_weights_v = sigma_v;
  if (sigma_v) {
    PetscObjectReference((PetscObject)sigma_v);
  }
  if (vals) {
    if (tao->sep_weights_n) {
      PetscFree(tao->sep_weights_rows);
      PetscFree(tao->sep_weights_cols);
      PetscFree(tao->sep_weights_w);
    }
    PetscMalloc1(n,&tao->sep_weights_rows);
    PetscMalloc1(n,&tao->sep_weights_cols);
    PetscMalloc1(n,&tao->sep_weights_w);
    tao->sep_weights_n = n;
    for (i=0;i<n;i++) {
      tao->sep_weights_rows[i] = rows[i];
      tao->sep_weights_cols[i] = cols[i];
      tao->sep_weights_w[i]    = vals[i];
    }
  } else {
    tao->sep_weights_n    = 0;
    tao->sep_weights_rows = 0;
    tao->sep_weights_cols = 0;
  }
  return(0);
}
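/*
   Example (sketch): diagonal weighting of the residuals with a vector; the
   matrix-style arguments are passed as 0/NULL since only sigma_v is used.
   F is assumed to be the separable objective vector from the earlier sketch.

      Vec w;
      VecDuplicate(F,&w);
      VecSet(w,2.0);          // weight every residual equally by 2
      TaoSetSeparableObjectiveWeights(tao,w,0,NULL,NULL,NULL);
      VecDestroy(&w);         // safe, Tao keeps its own reference
*/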
/*@
   TaoComputeSeparableObjective - Computes a separable objective function vector at a given point (for least-squares applications)

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X   - input vector

   Output Parameter:
.  F - objective vector at X

   Notes:
   TaoComputeSeparableObjective() is typically used within minimization implementations,
   so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoSetSeparableObjectiveRoutine()
@*/
PetscErrorCode TaoComputeSeparableObjective(Tao tao, Vec X, Vec F)
{
  if (tao->ops->computeseparableobjective) {
    PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);
    PetscStackPush("Tao user separable objective evaluation routine");
    (*tao->ops->computeseparableobjective)(tao,X,F,tao->user_sepobjP);
    PetscStackPop;
    PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);
    tao->nfuncs++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetSeparableObjectiveRoutine() has not been called");
  PetscInfo(tao,"TAO separable function evaluation.\n");
  return(0);
}
/*@C
   TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  func - the gradient function
-  ctx  - [optional] user-defined context for private data for the gradient evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, Vec g, void *ctx);

+  x   - input vector
.  g   - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  tao->user_gradP           = ctx;
  tao->ops->computegradient = func;
  return(0);
}
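/*
   Example (sketch): the gradient matching the earlier objective sketch
   f(x) = 0.5*||x||^2, i.e. grad f(x) = x; the name MyGradient is hypothetical.

      PetscErrorCode MyGradient(Tao tao, Vec X, Vec G, void *ctx)
      {
        PetscErrorCode ierr;

        ierr = VecCopy(X,G);CHKERRQ(ierr);
        return 0;
      }

      TaoSetGradientRoutine(tao,MyGradient,NULL);
*/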
/*@C
   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao  - the Tao context
.  func - the objective and gradient evaluation function
-  ctx  - [optional] user-defined context for private data for the evaluation
          routine (may be NULL)

   Calling sequence of func:
$  func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+  x   - input vector
.  f   - objective value (output)
.  g   - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine()
@*/
PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
{
  tao->user_objgradP                    = ctx;
  tao->ops->computeobjectiveandgradient = func;
  return(0);
}
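/*
   Example (sketch): a combined routine for f(x) = 0.5*||x||^2 that shares
   work between the objective and gradient; the name MyObjGrad is hypothetical.

      PetscErrorCode MyObjGrad(Tao tao, Vec X, PetscReal *f, Vec G, void *ctx)
      {
        PetscErrorCode ierr;
        PetscScalar    dot;

        ierr = VecCopy(X,G);CHKERRQ(ierr);       // grad f(x) = x
        ierr = VecDot(X,G,&dot);CHKERRQ(ierr);   // reuse G for the inner product
        *f   = 0.5*PetscRealPart(dot);
        return 0;
      }

      TaoSetObjectiveAndGradientRoutine(tao,MyObjGrad,NULL);
*/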
/*@
   TaoIsObjectiveDefined - Checks to see if the user has
   declared an objective-only routine. Useful for determining when
   it is appropriate to call TaoComputeObjective() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if the objective function routine is set by the user,
         PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjective == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
/*@
   TaoIsGradientDefined - Checks to see if the user has
   declared a gradient routine. Useful for determining when
   it is appropriate to call TaoComputeGradient() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if the gradient routine is set by the user, PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computegradient == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
/*@
   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
   declared a joint objective/gradient routine. Useful for determining when
   it is appropriate to call TaoComputeObjective() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if the objective/gradient routine is set by the user, PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  if (tao->ops->computeobjectiveandgradient == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  return(0);
}
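/*
   Example (sketch): a solver choosing its evaluation strategy from what the
   user supplied, mirroring the fallback logic in TaoComputeObjectiveAndGradient()
   above; x, g, and f are hypothetical solver variables.

      PetscBool hasobjgrad,hasobj;
      PetscReal f;
      TaoIsObjectiveAndGradientDefined(tao,&hasobjgrad);
      TaoIsObjectiveDefined(tao,&hasobj);
      if (hasobjgrad)  TaoComputeObjectiveAndGradient(tao,x,&f,g);
      else if (hasobj) TaoComputeObjective(tao,x,&f);
*/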