Actual source code: brgn.c
#include <../src/tao/leastsquares/impls/brgn/brgn.h>

#define BRGN_REGULARIZATION_USER    0
#define BRGN_REGULARIZATION_L2PROX  1
#define BRGN_REGULARIZATION_L2PURE  2
#define BRGN_REGULARIZATION_L1DICT  3
#define BRGN_REGULARIZATION_LM      4
#define BRGN_REGULARIZATION_TYPES   5

static const char *BRGN_REGULARIZATION_TABLE[64] = {"user","l2prox","l2pure","l1dict","lm"};

static PetscErrorCode GNHessianProd(Mat H,Vec in,Vec out)
{
  TAO_BRGN       *gn;
  PetscErrorCode ierr;

  MatShellGetContext(H,&gn);
  MatMult(gn->subsolver->ls_jac,in,gn->r_work);
  MatMultTranspose(gn->subsolver->ls_jac,gn->r_work,out);
  switch (gn->reg_type) {
  case BRGN_REGULARIZATION_USER:
    MatMult(gn->Hreg,in,gn->x_work);
    VecAXPY(out,gn->lambda,gn->x_work);
    break;
  case BRGN_REGULARIZATION_L2PURE:
    VecAXPY(out,gn->lambda,in);
    break;
  case BRGN_REGULARIZATION_L2PROX:
    VecAXPY(out,gn->lambda,in);
    break;
  case BRGN_REGULARIZATION_L1DICT:
    /* out = out + lambda*D'*(diag.*(D*in)) */
    if (gn->D) {
      MatMult(gn->D,in,gn->y); /* y = D*in */
    } else {
      VecCopy(in,gn->y);
    }
    VecPointwiseMult(gn->y_work,gn->diag,gn->y); /* y_work = diag.*(D*in), where diag = epsilon^2 ./ sqrt(x.^2+epsilon^2).^3 */
    if (gn->D) {
      MatMultTranspose(gn->D,gn->y_work,gn->x_work); /* x_work = D'*(diag.*(D*in)) */
    } else {
      VecCopy(gn->y_work,gn->x_work);
    }
    VecAXPY(out,gn->lambda,gn->x_work);
    break;
  case BRGN_REGULARIZATION_LM:
    VecPointwiseMult(gn->x_work,gn->damping,in);
    VecAXPY(out,1,gn->x_work);
    break;
  }
  return(0);
}

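/*
  Note: GNHessianProd applies the Gauss-Newton Hessian matrix-free,
      out = J'*J*in + lambda*(regularizer Hessian)*in,
  where the regularizer term is Hreg*in (user), in (l2pure, l2prox),
  D'*(diag.*(D*in)) (l1dict), or damping.*in (lm, with lambda already
  folded into the damping vector by ComputeDamping()).
*/
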
static PetscErrorCode ComputeDamping(TAO_BRGN *gn)
{
  const PetscScalar *diag_ary;
  PetscScalar       *damping_ary;
  PetscInt          i,n;
  PetscErrorCode    ierr;

  /* update damping */
  VecGetArray(gn->damping,&damping_ary);
  VecGetArrayRead(gn->diag,&diag_ary);
  VecGetLocalSize(gn->damping,&n);
  for (i=0; i<n; i++) {
    damping_ary[i] = PetscClipInterval(diag_ary[i],PETSC_SQRT_MACHINE_EPSILON,PetscSqrtReal(PETSC_MAX_REAL));
  }
  VecScale(gn->damping,gn->lambda);
  VecRestoreArray(gn->damping,&damping_ary);
  VecRestoreArrayRead(gn->diag,&diag_ary);
  return(0);
}

PetscErrorCode TaoBRGNGetDampingVector(Tao tao,Vec *d)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (gn->reg_type != BRGN_REGULARIZATION_LM) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_SUP,"Damping vector is only available if regularization type is lm.");
  *d = gn->damping;
  return(0);
}

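/*
  Usage sketch (hypothetical caller code, assuming tao is a TAOBRGN solver run
  with -tao_brgn_regularization_type lm); the damping vector is owned by BRGN
  and is destroyed in TaoDestroy_BRGN(), so the caller should not destroy it:

    Vec damping;
    TaoSolve(tao);
    TaoBRGNGetDampingVector(tao,&damping);
    VecView(damping,PETSC_VIEWER_STDOUT_WORLD);
*/
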
static PetscErrorCode GNObjectiveGradientEval(Tao tao,Vec X,PetscReal *fcn,Vec G,void *ptr)
{
  TAO_BRGN       *gn = (TAO_BRGN *)ptr;
  PetscInt       K; /* dimension of D*X */
  PetscScalar    yESum;
  PetscErrorCode ierr;
  PetscReal      f_reg;

  /* compute objective *fcn */
  /* compute first term 0.5*||ls_res||_2^2 */
  TaoComputeResidual(tao,X,tao->ls_res);
  VecDot(tao->ls_res,tao->ls_res,fcn);
  *fcn *= 0.5;
  /* compute gradient G */
  TaoComputeResidualJacobian(tao,X,tao->ls_jac,tao->ls_jac_pre);
  MatMultTranspose(tao->ls_jac,tao->ls_res,G);
  /* add the regularization contribution */
  switch (gn->reg_type) {
  case BRGN_REGULARIZATION_USER:
    (*gn->regularizerobjandgrad)(tao,X,&f_reg,gn->x_work,gn->reg_obj_ctx);
    *fcn += gn->lambda*f_reg;
    VecAXPY(G,gn->lambda,gn->x_work);
    break;
  case BRGN_REGULARIZATION_L2PURE:
    /* compute f = f + lambda*0.5*xk'*xk */
    VecDot(X,X,&f_reg);
    *fcn += gn->lambda*0.5*f_reg;
    /* compute G = G + lambda*xk */
    VecAXPY(G,gn->lambda,X);
    break;
  case BRGN_REGULARIZATION_L2PROX:
    /* compute f = f + lambda*0.5*(xk - xkm1)'*(xk - xkm1) */
    VecAXPBYPCZ(gn->x_work,1.0,-1.0,0.0,X,gn->x_old);
    VecDot(gn->x_work,gn->x_work,&f_reg);
    *fcn += gn->lambda*0.5*f_reg;
    /* compute G = G + lambda*(xk - xkm1) */
    VecAXPBYPCZ(G,gn->lambda,-gn->lambda,1.0,X,gn->x_old);
    break;
  case BRGN_REGULARIZATION_L1DICT:
    /* compute f = f + lambda*sum(sqrt(y.^2+epsilon^2) - epsilon), where y = D*x */
    if (gn->D) {
      MatMult(gn->D,X,gn->y); /* y = D*x */
    } else {
      VecCopy(X,gn->y);
    }
    VecPointwiseMult(gn->y_work,gn->y,gn->y);
    VecShift(gn->y_work,gn->epsilon*gn->epsilon);
    VecSqrtAbs(gn->y_work); /* gn->y_work = sqrt(y.^2+epsilon^2) */
    VecSum(gn->y_work,&yESum);
    VecGetSize(gn->y,&K);
    *fcn += gn->lambda*(yESum - K*gn->epsilon);
    /* compute G = G + lambda*D'*(y./sqrt(y.^2+epsilon^2)), where y = D*x */
    VecPointwiseDivide(gn->y_work,gn->y,gn->y_work); /* reuse y_work = y./sqrt(y.^2+epsilon^2) */
    if (gn->D) {
      MatMultTranspose(gn->D,gn->y_work,gn->x_work);
    } else {
      VecCopy(gn->y_work,gn->x_work);
    }
    VecAXPY(G,gn->lambda,gn->x_work);
    break;
  }
  return(0);
}

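/*
  For reference, the smoothed L1 ("l1dict") term used above is
      g(x) = sum(sqrt(y.^2+epsilon^2) - epsilon),  y = D*x,
  whose gradient is
      grad g = D'*(y./sqrt(y.^2+epsilon^2)),
  which is what gets added to G with weight lambda.
*/
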
static PetscErrorCode GNComputeHessian(Tao tao,Vec X,Mat H,Mat Hpre,void *ptr)
{
  TAO_BRGN    *gn = (TAO_BRGN *)ptr;
  PetscInt    i,n,cstart,cend;
  PetscScalar *cnorms,*diag_ary;

  TaoComputeResidualJacobian(tao,X,tao->ls_jac,tao->ls_jac_pre);
  if (gn->mat_explicit) {
    MatTransposeMatMult(tao->ls_jac,tao->ls_jac,MAT_REUSE_MATRIX,PETSC_DEFAULT,&gn->H);
  }

  switch (gn->reg_type) {
  case BRGN_REGULARIZATION_USER:
    (*gn->regularizerhessian)(tao,X,gn->Hreg,gn->reg_hess_ctx);
    if (gn->mat_explicit) {
      MatAXPY(gn->H,1.0,gn->Hreg,DIFFERENT_NONZERO_PATTERN);
    }
    break;
  case BRGN_REGULARIZATION_L2PURE:
    if (gn->mat_explicit) {
      MatShift(gn->H,gn->lambda);
    }
    break;
  case BRGN_REGULARIZATION_L2PROX:
    if (gn->mat_explicit) {
      MatShift(gn->H,gn->lambda);
    }
    break;
  case BRGN_REGULARIZATION_L1DICT:
    /* calculate and store the diagonal matrix as a vector: diag = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3, where y = D*x */
    if (gn->D) {
      MatMult(gn->D,X,gn->y); /* y = D*x */
    } else {
      VecCopy(X,gn->y);
    }
    VecPointwiseMult(gn->y_work,gn->y,gn->y);
    VecShift(gn->y_work,gn->epsilon*gn->epsilon);
    VecCopy(gn->y_work,gn->diag);                   /* gn->diag = y.^2+epsilon^2 */
    VecSqrtAbs(gn->y_work);                         /* gn->y_work = sqrt(y.^2+epsilon^2) */
    VecPointwiseMult(gn->diag,gn->y_work,gn->diag); /* gn->diag = sqrt(y.^2+epsilon^2).^3 */
    VecReciprocal(gn->diag);
    VecScale(gn->diag,gn->epsilon*gn->epsilon);
    if (gn->mat_explicit) {
      MatDiagonalSet(gn->H,gn->diag,ADD_VALUES);
    }
    break;
  case BRGN_REGULARIZATION_LM:
    /* compute the diagonal of J^T J */
    MatGetSize(gn->parent->ls_jac,NULL,&n);
    PetscMalloc1(n,&cnorms);
    MatGetColumnNorms(gn->parent->ls_jac,NORM_2,cnorms);
    MatGetOwnershipRangeColumn(gn->parent->ls_jac,&cstart,&cend);
    VecGetArray(gn->diag,&diag_ary);
    for (i = 0; i < cend-cstart; i++) {
      diag_ary[i] = cnorms[cstart+i] * cnorms[cstart+i];
    }
    VecRestoreArray(gn->diag,&diag_ary);
    PetscFree(cnorms);
    ComputeDamping(gn);
    if (gn->mat_explicit) {
      MatDiagonalSet(gn->H,gn->damping,ADD_VALUES);
    }
    break;
  }
  return(0);
}

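/*
  For reference, differentiating the smoothed L1 gradient once more gives the
  diagonal Hessian approximation stored in gn->diag above,
      d^2g/dy_i^2 = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3,  y = D*x.
  For the "lm" regularizer, gn->diag instead holds the diagonal of J'*J (the
  squared column norms of J), and ComputeDamping() sets
      damping = lambda*clip(diag, PETSC_SQRT_MACHINE_EPSILON, sqrt(PETSC_MAX_REAL)),
  i.e. a Levenberg-Marquardt style damping term.
*/
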
static PetscErrorCode GNHookFunction(Tao tao,PetscInt iter,void *ctx)
{
  TAO_BRGN       *gn = (TAO_BRGN *)ctx;
  PetscErrorCode ierr;

  /* Update basic tao information from the subsolver */
  gn->parent->nfuncs      = tao->nfuncs;
  gn->parent->ngrads      = tao->ngrads;
  gn->parent->nfuncgrads  = tao->nfuncgrads;
  gn->parent->nhess       = tao->nhess;
  gn->parent->niter       = tao->niter;
  gn->parent->ksp_its     = tao->ksp_its;
  gn->parent->ksp_tot_its = tao->ksp_tot_its;
  gn->parent->fc          = tao->fc;
  TaoGetConvergedReason(tao,&gn->parent->reason);
  /* Update the solution vectors */
  if (iter == 0) {
    VecSet(gn->x_old,0.0);
  } else {
    VecCopy(tao->solution,gn->x_old);
    VecCopy(tao->solution,gn->parent->solution);
  }
  /* Update the gradient */
  VecCopy(tao->gradient,gn->parent->gradient);

  /* Update damping parameter for LM */
  if (gn->reg_type == BRGN_REGULARIZATION_LM) {
    if (iter > 0) {
      if (gn->fc_old > tao->fc) {
        gn->lambda = gn->lambda * gn->downhill_lambda_change;
      } else {
        /* uphill step */
        gn->lambda = gn->lambda * gn->uphill_lambda_change;
      }
    }
    gn->fc_old = tao->fc;
  }

  /* Call general purpose update function */
  if (gn->parent->ops->update) {
    (*gn->parent->ops->update)(gn->parent,gn->parent->niter,gn->parent->user_update);
  }
  return(0);
}

static PetscErrorCode TaoSolve_BRGN(Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;

  TaoSolve(gn->subsolver);
  /* Update basic tao information from the subsolver */
  tao->nfuncs      = gn->subsolver->nfuncs;
  tao->ngrads      = gn->subsolver->ngrads;
  tao->nfuncgrads  = gn->subsolver->nfuncgrads;
  tao->nhess       = gn->subsolver->nhess;
  tao->niter       = gn->subsolver->niter;
  tao->ksp_its     = gn->subsolver->ksp_its;
  tao->ksp_tot_its = gn->subsolver->ksp_tot_its;
  TaoGetConvergedReason(gn->subsolver,&tao->reason);
  /* Update vectors */
  VecCopy(gn->subsolver->solution,tao->solution);
  VecCopy(gn->subsolver->gradient,tao->gradient);
  return(0);
}

static PetscErrorCode TaoSetFromOptions_BRGN(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  TaoLineSearch  ls;
  PetscErrorCode ierr;

  PetscOptionsHead(PetscOptionsObject,"least-squares problems with regularizer: ||f(x)||^2 + lambda*g(x), g(x) = ||xk-xkm1||^2 or ||Dx||_1 or user defined function.");
  PetscOptionsBool("-tao_brgn_mat_explicit","switches the Hessian construction to be an explicit matrix rather than MATSHELL","",gn->mat_explicit,&gn->mat_explicit,NULL);
  PetscOptionsReal("-tao_brgn_regularizer_weight","regularizer weight (default 1e-4)","",gn->lambda,&gn->lambda,NULL);
  PetscOptionsReal("-tao_brgn_l1_smooth_epsilon","L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)","",gn->epsilon,&gn->epsilon,NULL);
  PetscOptionsReal("-tao_brgn_lm_downhill_lambda_change","Factor to decrease trust region by on downhill steps","",gn->downhill_lambda_change,&gn->downhill_lambda_change,NULL);
  PetscOptionsReal("-tao_brgn_lm_uphill_lambda_change","Factor to increase trust region by on uphill steps","",gn->uphill_lambda_change,&gn->uphill_lambda_change,NULL);
  PetscOptionsEList("-tao_brgn_regularization_type","regularization type","",BRGN_REGULARIZATION_TABLE,BRGN_REGULARIZATION_TYPES,BRGN_REGULARIZATION_TABLE[gn->reg_type],&gn->reg_type,NULL);
  PetscOptionsTail();
  /* set the unit-step line search as the default when using the lm regularizer */
  if (gn->reg_type == BRGN_REGULARIZATION_LM) {
    TaoGetLineSearch(gn->subsolver,&ls);
    TaoLineSearchSetType(ls,TAOLINESEARCHUNIT);
  }
  TaoSetFromOptions(gn->subsolver);
  return(0);
}

static PetscErrorCode TaoView_BRGN(Tao tao,PetscViewer viewer)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;

  PetscViewerASCIIPushTab(viewer);
  TaoView(gn->subsolver,viewer);
  PetscViewerASCIIPopTab(viewer);
  return(0);
}

static PetscErrorCode TaoSetUp_BRGN(Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;
  PetscBool      is_bnls,is_bntr,is_bntl;
  PetscInt       i,n,N,K; /* dict has size K*N */

  if (!tao->ls_res) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ORDER,"TaoSetResidualRoutine() must be called before setup!");
  PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNLS,&is_bnls);
  PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNTR,&is_bntr);
  PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNTL,&is_bntl);
  if ((is_bnls || is_bntr || is_bntl) && !tao->ls_jac) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ORDER,"TaoSetJacobianResidualRoutine() must be called before setup!");
  if (!tao->gradient) {
    VecDuplicate(tao->solution,&tao->gradient);
  }
  if (!gn->x_work) {
    VecDuplicate(tao->solution,&gn->x_work);
  }
  if (!gn->r_work) {
    VecDuplicate(tao->ls_res,&gn->r_work);
  }
  if (!gn->x_old) {
    VecDuplicate(tao->solution,&gn->x_old);
    VecSet(gn->x_old,0.0);
  }

  if (BRGN_REGULARIZATION_L1DICT == gn->reg_type) {
    if (!gn->y) {
      if (gn->D) {
        MatGetSize(gn->D,&K,&N); /* Shell matrices still must have sizes defined. K = N for the identity matrix, K = N-1 or N for a gradient matrix */
        MatCreateVecs(gn->D,NULL,&gn->y);
      } else {
        VecDuplicate(tao->solution,&gn->y); /* If the user does not set up a dictionary matrix, use the identity matrix, K = N */
      }
      VecSet(gn->y,0.0);
    }
    if (!gn->y_work) {
      VecDuplicate(gn->y,&gn->y_work);
    }
    if (!gn->diag) {
      VecDuplicate(gn->y,&gn->diag);
      VecSet(gn->diag,0.0);
    }
  }
  if (BRGN_REGULARIZATION_LM == gn->reg_type) {
    if (!gn->diag) {
      MatCreateVecs(tao->ls_jac,&gn->diag,NULL);
    }
    if (!gn->damping) {
      MatCreateVecs(tao->ls_jac,&gn->damping,NULL);
    }
  }

  if (!tao->setupcalled) {
    /* Hessian setup */
    if (gn->mat_explicit) {
      TaoComputeResidualJacobian(tao,tao->solution,tao->ls_jac,tao->ls_jac_pre);
      MatTransposeMatMult(tao->ls_jac,tao->ls_jac,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&gn->H);
    } else {
      VecGetLocalSize(tao->solution,&n);
      VecGetSize(tao->solution,&N);
      MatCreate(PetscObjectComm((PetscObject)tao),&gn->H);
      MatSetSizes(gn->H,n,n,N,N);
      MatSetType(gn->H,MATSHELL);
      MatSetOption(gn->H,MAT_SYMMETRIC,PETSC_TRUE);
      MatShellSetOperation(gn->H,MATOP_MULT,(void (*)(void))GNHessianProd);
      MatShellSetContext(gn->H,(void*)gn);
    }
    MatSetUp(gn->H);
    /* Subsolver setup, including the initial vector and the dictionary D */
    TaoSetUpdate(gn->subsolver,GNHookFunction,(void*)gn);
    TaoSetInitialVector(gn->subsolver,tao->solution);
    if (tao->bounded) {
      TaoSetVariableBounds(gn->subsolver,tao->XL,tao->XU);
    }
    TaoSetResidualRoutine(gn->subsolver,tao->ls_res,tao->ops->computeresidual,tao->user_lsresP);
    TaoSetJacobianResidualRoutine(gn->subsolver,tao->ls_jac,tao->ls_jac,tao->ops->computeresidualjacobian,tao->user_lsjacP);
    TaoSetObjectiveAndGradientRoutine(gn->subsolver,GNObjectiveGradientEval,(void*)gn);
    TaoSetHessianRoutine(gn->subsolver,gn->H,gn->H,GNComputeHessian,(void*)gn);
    /* Propagate some options down */
    TaoSetTolerances(gn->subsolver,tao->gatol,tao->grtol,tao->gttol);
    TaoSetMaximumIterations(gn->subsolver,tao->max_it);
    TaoSetMaximumFunctionEvaluations(gn->subsolver,tao->max_funcs);
    for (i=0; i<tao->numbermonitors; ++i) {
      TaoSetMonitor(gn->subsolver,tao->monitor[i],tao->monitorcontext[i],tao->monitordestroy[i]);
      PetscObjectReference((PetscObject)(tao->monitorcontext[i]));
    }
    TaoSetUp(gn->subsolver);
  }
  return(0);
}

static PetscErrorCode TaoDestroy_BRGN(Tao tao)
{
  TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
  PetscErrorCode ierr;

  if (tao->setupcalled) {
    VecDestroy(&tao->gradient);
    VecDestroy(&gn->x_work);
    VecDestroy(&gn->r_work);
    VecDestroy(&gn->x_old);
    VecDestroy(&gn->diag);
    VecDestroy(&gn->y);
    VecDestroy(&gn->y_work);
  }
  VecDestroy(&gn->damping);
  VecDestroy(&gn->diag);
  MatDestroy(&gn->H);
  MatDestroy(&gn->D);
  MatDestroy(&gn->Hreg);
  TaoDestroy(&gn->subsolver);
  gn->parent = NULL;
  PetscFree(tao->data);
  return(0);
}

/*MC
  TAOBRGN - Bounded Regularized Gauss-Newton method for solving nonlinear least-squares
            problems with bound constraints. This algorithm is a thin wrapper around TAOBNLS
            that constructs the Gauss-Newton problem with the user-provided least-squares
            residual and Jacobian. The algorithm offers an L2-norm regularizer ("l2pure"),
            an L2-norm proximal point regularizer ("l2prox"), and an L1-norm dictionary
            regularizer ("l1dict"), where the L1-norm ||x||_1 is approximated by
            sum_i(sqrt(x_i^2+epsilon^2)-epsilon) with a small positive number epsilon.
            Also offered is the "lm" regularizer, which uses a scaled diagonal of J^T J;
            with the "lm" regularizer, BRGN is a Levenberg-Marquardt optimizer.
            The user can also provide their own regularization function ("user").

  Options Database Keys:
+ -tao_brgn_regularization_type - regularization type ("user", "l2prox", "l2pure", "l1dict", "lm") (default "l2prox")
. -tao_brgn_regularizer_weight - regularizer weight (default 1e-4)
- -tao_brgn_l1_smooth_epsilon - L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)

  Level: beginner
M*/

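/*
  Typical driver usage, sketched with hypothetical user callbacks
  EvaluateResidual()/EvaluateJacobian() and user-created objects x, res, J, user;
  the regularization is then chosen at run time, e.g. with
  -tao_brgn_regularization_type l1dict:

    Tao tao;
    TaoCreate(PETSC_COMM_WORLD,&tao);
    TaoSetType(tao,TAOBRGN);
    TaoSetInitialVector(tao,x);
    TaoSetResidualRoutine(tao,res,EvaluateResidual,&user);
    TaoSetJacobianResidualRoutine(tao,J,J,EvaluateJacobian,&user);
    TaoSetFromOptions(tao);
    TaoSolve(tao);
    TaoDestroy(&tao);
*/
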
PETSC_EXTERN PetscErrorCode TaoCreate_BRGN(Tao tao)
{
  TAO_BRGN *gn;

  PetscNewLog(tao,&gn);

  tao->ops->destroy        = TaoDestroy_BRGN;
  tao->ops->setup          = TaoSetUp_BRGN;
  tao->ops->setfromoptions = TaoSetFromOptions_BRGN;
  tao->ops->view           = TaoView_BRGN;
  tao->ops->solve          = TaoSolve_BRGN;

  tao->data = (void*)gn;
  gn->reg_type               = BRGN_REGULARIZATION_L2PROX;
  gn->lambda                 = 1e-4;
  gn->epsilon                = 1e-6;
  gn->downhill_lambda_change = 1./5.;
  gn->uphill_lambda_change   = 1.5;
  gn->parent                 = tao;

  TaoCreate(PetscObjectComm((PetscObject)tao),&gn->subsolver);
  TaoSetType(gn->subsolver,TAOBNLS);
  TaoSetOptionsPrefix(gn->subsolver,"tao_brgn_subsolver_");
  return(0);
}

/*@
  TaoBRGNGetSubsolver - Get the pointer to the subsolver inside BRGN

  Collective on Tao

  Input Parameter:
. tao - the Tao solver context

  Output Parameter:
. subsolver - the Tao sub-solver context

  Level: advanced
@*/
PetscErrorCode TaoBRGNGetSubsolver(Tao tao,Tao *subsolver)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  *subsolver = gn->subsolver;
  return(0);
}

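/*
  Usage sketch (hypothetical caller code): the subsolver can be retrieved and
  adjusted directly instead of through the tao_brgn_subsolver_ options prefix,
  for example to switch it from the default TAOBNLS:

    Tao subsolver;
    TaoBRGNGetSubsolver(tao,&subsolver);
    TaoSetType(subsolver,TAOBNTR);
*/
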
/*@
  TaoBRGNSetRegularizerWeight - Set the regularizer weight for the Gauss-Newton least-squares algorithm

  Collective on Tao

  Input Parameters:
+ tao - the Tao solver context
- lambda - regularizer weight

  Level: beginner
@*/
PetscErrorCode TaoBRGNSetRegularizerWeight(Tao tao,PetscReal lambda)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  /* Initialize lambda here */

  gn->lambda = lambda;
  return(0);
}

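/*
  Usage sketch (hypothetical caller code), equivalent to the
  -tao_brgn_regularizer_weight option; the default weight is 1e-4:

    TaoBRGNSetRegularizerWeight(tao,1e-2);
*/
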
/*@
  TaoBRGNSetL1SmoothEpsilon - Set the L1-norm smooth approximation parameter for the L1-regularized least-squares algorithm

  Collective on Tao

  Input Parameters:
+ tao - the Tao solver context
- epsilon - L1-norm smooth approximation parameter

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetL1SmoothEpsilon(Tao tao,PetscReal epsilon)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  /* Initialize epsilon here */

  gn->epsilon = epsilon;
  return(0);
}

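/*
  Usage sketch (hypothetical caller code), equivalent to the
  -tao_brgn_l1_smooth_epsilon option (default 1e-6); epsilon is only used by
  the "l1dict" regularization type:

    TaoBRGNSetL1SmoothEpsilon(tao,1e-8);
*/
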
/*@
  TaoBRGNSetDictionaryMatrix - Bind the dictionary matrix from the user application context to gn->D, for compressed sensing (with a least-squares problem)

  Input Parameters:
+ tao - the Tao context
- dict - the user-specified dictionary matrix. A NULL dictionary means the identity matrix is used

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetDictionaryMatrix(Tao tao,Mat dict)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (dict) {
    PetscObjectReference((PetscObject)dict);
  }
  MatDestroy(&gn->D);
  gn->D = dict;
  return(0);
}

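/*
  Usage sketch (hypothetical caller code), assuming the user has assembled a
  K-by-N dictionary matrix D (for example a finite-difference gradient
  operator). BRGN takes its own reference to D, so the caller may destroy its
  handle afterwards; passing NULL selects the identity dictionary:

    TaoBRGNSetDictionaryMatrix(tao,D);
    MatDestroy(&D);
*/
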
/*@C
  TaoBRGNSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined callback function that
  evaluates the regularizer objective value and gradient for the algorithm.

  Input Parameters:
+ tao - the Tao context
. func - function pointer for the regularizer value and gradient evaluation
- ctx - user context for the regularizer

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetRegularizerObjectiveAndGradientRoutine(Tao tao,PetscErrorCode (*func)(Tao,Vec,PetscReal *,Vec,void*),void *ctx)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (ctx) {
    gn->reg_obj_ctx = ctx;
  }
  if (func) {
    gn->regularizerobjandgrad = func;
  }
  return(0);
}

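/*
  Usage sketch with a hypothetical callback implementing the regularizer
  g(x) = 0.5*||x||^2 (value returned in *val, gradient in G), matching the
  required signature:

    PetscErrorCode MyRegObjGrad(Tao tao,Vec X,PetscReal *val,Vec G,void *ctx)
    {
      PetscReal nrm;
      VecNorm(X,NORM_2,&nrm);
      *val = 0.5*nrm*nrm;
      VecCopy(X,G);
      return 0;
    }

    TaoBRGNSetRegularizerObjectiveAndGradientRoutine(tao,MyRegObjGrad,&user);
*/
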
/*@C
  TaoBRGNSetRegularizerHessianRoutine - Sets the user-defined callback function that
  evaluates the Hessian of the regularizer for the algorithm.

  Input Parameters:
+ tao - the Tao context
. Hreg - user-created matrix for the Hessian of the regularization term
. func - function pointer for the regularizer Hessian evaluation
- ctx - user context for the regularizer Hessian

  Level: advanced
@*/
PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(Tao tao,Mat Hreg,PetscErrorCode (*func)(Tao,Vec,Mat,void*),void *ctx)
{
  TAO_BRGN *gn = (TAO_BRGN *)tao->data;

  if (!Hreg) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONG,"NULL Hessian detected! User must provide valid Hessian for the regularizer.");
  if (ctx) {
    gn->reg_hess_ctx = ctx;
  }
  if (func) {
    gn->regularizerhessian = func;
  }
  PetscObjectReference((PetscObject)Hreg);
  MatDestroy(&gn->Hreg);
  gn->Hreg = Hreg;
  return(0);
}

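/*
  Usage sketch continuing the g(x) = 0.5*||x||^2 example above: its Hessian is
  the identity, so a hypothetical callback can simply leave the user-assembled
  identity matrix Hreg untouched:

    PetscErrorCode MyRegHessian(Tao tao,Vec X,Mat Hreg,void *ctx)
    {
      return 0;
    }

    TaoBRGNSetRegularizerHessianRoutine(tao,Hreg,MyRegHessian,&user);
*/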