/*
  Context for the Bounded Regularized Gauss-Newton (BRGN) algorithm.
  Extends the Gauss-Newton objective with an L1 regularizer applied through a linear transformation (dictionary) matrix D:
      0.5*||Ax-b||^2 + lambda*||D*x||_1
  When D is the identity matrix, this reduces to the classic lasso, also known as basis pursuit denoising in compressive sensing.
*/
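/*
  A minimal usage sketch (not part of this header), assuming the caller has already assembled
  its own residual/Jacobian callbacks (EvaluateResidual, EvaluateJacobian), a dictionary matrix
  D, a Jacobian matrix J, an initial guess x, a residual work vector r, and a user context
  `user`; those names are placeholders.  The Tao*/TaoBRGN* calls are taken from the public TAO
  interface; error checking is omitted, and TaoSetInitialVector is named TaoSetSolution in
  newer PETSc releases.

    TaoCreate(PETSC_COMM_WORLD,&tao);
    TaoSetType(tao,TAOBRGN);
    TaoSetInitialVector(tao,x);
    TaoSetResidualRoutine(tao,r,EvaluateResidual,&user);
    TaoSetJacobianResidualRoutine(tao,J,J,EvaluateJacobian,&user);
    TaoBRGNSetDictionaryMatrix(tao,D);        -- D of the ||D*x||_1 term
    TaoBRGNSetRegularizerWeight(tao,lambda);  -- weight lambda of the regularizer
    TaoBRGNSetL1SmoothEpsilon(tao,epsilon);   -- epsilon of the smooth L1 approximation
    TaoSetFromOptions(tao);                   -- e.g. -tao_brgn_regularization_type l1dict
    TaoSolve(tao);
    TaoDestroy(&tao);
*/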
#if !defined(__TAO_BRGN_H)
#define __TAO_BRGN_H

#include <../src/tao/bound/impls/bnk/bnk.h>

typedef struct {
  PetscErrorCode (*regularizerobjandgrad)(Tao,Vec,PetscReal*,Vec,void*);
  PetscErrorCode (*regularizerhessian)(Tao,Vec,Mat,void*);
  void           *reg_obj_ctx;
  void           *reg_hess_ctx;
  Mat            H,Hreg,D; /* Hessian, Hessian of the regularization term, and dictionary matrix, of size N*N, N*N, and K*N respectively. (Jacobian M*N not used here) */
  Vec            x_old,x_work,r_work,diag,y,y_work; /* x, r=J*x, and y=D*x have size N, M, and K respectively. */
  Vec            damping; /* Optional diagonal damping, stored as a vector. */
  Tao            subsolver,parent;
  PetscReal      lambda,epsilon,fc_old; /* lambda is the regularizer weight for both the L2-norm Gauss-Newton and the L1-norm terms; ||x||_1 is approximated by sum(sqrt(x.^2+epsilon^2)-epsilon) (see the illustrative helper below the struct). */
  PetscReal      downhill_lambda_change,uphill_lambda_change; /* With the LM regularizer lambda*diag(J^T J):
                                                                  lambda = downhill_lambda_change * lambda on steps that decrease the objective,
                                                                  lambda = uphill_lambda_change * lambda on steps that increase the objective. */
  PetscInt       reg_type;
} TAO_BRGN;
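
/*
  Illustrative sketch only (not used by the implementation): evaluates the smooth approximation
  of ||y||_1 referenced above, sum(sqrt(y_i^2 + epsilon^2) - epsilon), together with its
  elementwise gradient y_i/sqrt(y_i^2 + epsilon^2), on a raw array with epsilon > 0.  The name
  BRGNSmoothedL1Sketch and the raw-array interface are hypothetical; the solver itself applies
  this formula to the Vec y = D*x.
*/
static inline PetscReal BRGNSmoothedL1Sketch(PetscInt n,const PetscReal y[],PetscReal epsilon,PetscReal grad[])
{
  PetscReal sum = 0.0;
  PetscInt  i;

  for (i = 0; i < n; ++i) {
    PetscReal w = PetscSqrtReal(y[i]*y[i] + epsilon*epsilon); /* smooth surrogate for |y_i| */
    sum += w - epsilon;                                       /* contribution to the regularizer value */
    if (grad) grad[i] = y[i]/w;                               /* d/dy_i sqrt(y_i^2+epsilon^2) */
  }
  return sum;
}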
#endif /* if !defined(__TAO_BRGN_H) */