Actual source code: brgn.c

  1: #include <../src/tao/leastsquares/impls/brgn/brgn.h>

  3: #define BRGN_REGULARIZATION_USER    0
  4: #define BRGN_REGULARIZATION_L2PROX  1
  5: #define BRGN_REGULARIZATION_L2PURE  2
  6: #define BRGN_REGULARIZATION_L1DICT  3
  7: #define BRGN_REGULARIZATION_LM      4
  8: #define BRGN_REGULARIZATION_TYPES   5

 10: static const char *BRGN_REGULARIZATION_TABLE[64] = {"user","l2prox","l2pure","l1dict","lm"};

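/* MATSHELL multiplication callback for the Gauss-Newton Hessian approximation:
   out = J^T J * in + (regularization contribution), where the extra contribution depends on gn->reg_type */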
 12: static PetscErrorCode GNHessianProd(Mat H,Vec in,Vec out)
 13: {
 14:   TAO_BRGN              *gn;

 16:   MatShellGetContext(H,&gn);
 17:   MatMult(gn->subsolver->ls_jac,in,gn->r_work);
 18:   MatMultTranspose(gn->subsolver->ls_jac,gn->r_work,out);
 19:   switch (gn->reg_type) {
 20:   case BRGN_REGULARIZATION_USER:
 21:     MatMult(gn->Hreg,in,gn->x_work);
 22:     VecAXPY(out,gn->lambda,gn->x_work);
 23:     break;
 24:   case BRGN_REGULARIZATION_L2PURE:
 25:     VecAXPY(out,gn->lambda,in);
 26:     break;
 27:   case BRGN_REGULARIZATION_L2PROX:
 28:     VecAXPY(out,gn->lambda,in);
 29:     break;
 30:   case BRGN_REGULARIZATION_L1DICT:
 31:     /* out = out + lambda*D'*(diag.*(D*in)) */
 32:     if (gn->D) {
 33:       MatMult(gn->D,in,gn->y);/* y = D*in */
 34:     } else {
 35:       VecCopy(in,gn->y);
 36:     }
 37:     VecPointwiseMult(gn->y_work,gn->diag,gn->y);   /* y_work = diag.*(D*in), where diag = epsilon^2 ./ sqrt(x.^2+epsilon^2).^3 */
 38:     if (gn->D) {
 39:       MatMultTranspose(gn->D,gn->y_work,gn->x_work); /* x_work = D'*(diag.*(D*in)) */
 40:     } else {
 41:       VecCopy(gn->y_work,gn->x_work);
 42:     }
 43:     VecAXPY(out,gn->lambda,gn->x_work);
 44:     break;
 45:   case BRGN_REGULARIZATION_LM:
 46:     VecPointwiseMult(gn->x_work,gn->damping,in);
 47:     VecAXPY(out,1,gn->x_work);
 48:     break;
 49:   }
 50:   return 0;
 51: }
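/* Levenberg-Marquardt damping used by the "lm" regularization type:
   damping_i = lambda * clip(diag(J^T J)_i, sqrt(machine epsilon), sqrt(PETSC_MAX_REAL)),
   where gn->diag holds diag(J^T J) as computed in GNComputeHessian() */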
 52: static PetscErrorCode ComputeDamping(TAO_BRGN *gn)
 53: {
 54:   const PetscScalar *diag_ary;
 55:   PetscScalar       *damping_ary;
 56:   PetscInt          i,n;

 58:   /* update damping */
 59:   VecGetArray(gn->damping,&damping_ary);
 60:   VecGetArrayRead(gn->diag,&diag_ary);
 61:   VecGetLocalSize(gn->damping,&n);
 62:   for (i=0; i<n; i++) {
 63:     damping_ary[i] = PetscClipInterval(diag_ary[i],PETSC_SQRT_MACHINE_EPSILON,PetscSqrtReal(PETSC_MAX_REAL));
 64:   }
 65:   VecScale(gn->damping,gn->lambda);
 66:   VecRestoreArray(gn->damping,&damping_ary);
 67:   VecRestoreArrayRead(gn->diag,&diag_ary);
 68:   return 0;
 69: }

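/* TaoBRGNGetDampingVector - returns the Levenberg-Marquardt damping vector gn->damping
   (filled by ComputeDamping()); it is only created and updated when the "lm" regularization type is used */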
 71: PetscErrorCode TaoBRGNGetDampingVector(Tao tao,Vec *d)
 72: {
 73:   TAO_BRGN *gn = (TAO_BRGN *)tao->data;

 76:   *d = gn->damping;
 77:   return 0;
 78: }

 80: static PetscErrorCode GNObjectiveGradientEval(Tao tao,Vec X,PetscReal *fcn,Vec G,void *ptr)
 81: {
 82:   TAO_BRGN              *gn = (TAO_BRGN *)ptr;
 83:   PetscInt              K;                    /* dimension of D*X */
 84:   PetscScalar           yESum;
 85:   PetscReal             f_reg;

 87:   /* compute objective *fcn*/
 88:   /* compute first term 0.5*||ls_res||_2^2 */
 89:   TaoComputeResidual(tao,X,tao->ls_res);
 90:   VecDot(tao->ls_res,tao->ls_res,fcn);
 91:   *fcn *= 0.5;
 92:   /* compute gradient G */
 93:   TaoComputeResidualJacobian(tao,X,tao->ls_jac,tao->ls_jac_pre);
 94:   MatMultTranspose(tao->ls_jac,tao->ls_res,G);
 95:   /* add the regularization contribution */
 96:   switch (gn->reg_type) {
 97:   case BRGN_REGULARIZATION_USER:
 98:     (*gn->regularizerobjandgrad)(tao,X,&f_reg,gn->x_work,gn->reg_obj_ctx);
 99:     *fcn += gn->lambda*f_reg;
100:     VecAXPY(G,gn->lambda,gn->x_work);
101:     break;
102:   case BRGN_REGULARIZATION_L2PURE:
103:     /* compute f = f + lambda*0.5*xk'*xk */
104:     VecDot(X,X,&f_reg);
105:     *fcn += gn->lambda*0.5*f_reg;
106:     /* compute G = G + lambda*xk */
107:     VecAXPY(G,gn->lambda,X);
108:     break;
109:   case BRGN_REGULARIZATION_L2PROX:
110:     /* compute f = f + lambda*0.5*(xk - xkm1)'*(xk - xkm1) */
111:     VecAXPBYPCZ(gn->x_work,1.0,-1.0,0.0,X,gn->x_old);
112:     VecDot(gn->x_work,gn->x_work,&f_reg);
113:     *fcn += gn->lambda*0.5*f_reg;
114:     /* compute G = G + lambda*(xk - xkm1) */
115:     VecAXPBYPCZ(G,gn->lambda,-gn->lambda,1.0,X,gn->x_old);
116:     break;
117:   case BRGN_REGULARIZATION_L1DICT:
118:     /* compute f = f + lambda*sum(sqrt(y.^2+epsilon^2) - epsilon), where y = D*x*/
119:     if (gn->D) {
120:       MatMult(gn->D,X,gn->y);/* y = D*x */
121:     } else {
122:       VecCopy(X,gn->y);
123:     }
124:     VecPointwiseMult(gn->y_work,gn->y,gn->y);
125:     VecShift(gn->y_work,gn->epsilon*gn->epsilon);
126:     VecSqrtAbs(gn->y_work);  /* gn->y_work = sqrt(y.^2+epsilon^2) */
127:     VecSum(gn->y_work,&yESum);
128:     VecGetSize(gn->y,&K);
129:     *fcn += gn->lambda*(yESum - K*gn->epsilon);
130:     /* compute G = G + lambda*D'*(y./sqrt(y.^2+epsilon^2)),where y = D*x */
131:     VecPointwiseDivide(gn->y_work,gn->y,gn->y_work); /* reuse y_work = y./sqrt(y.^2+epsilon^2) */
132:     if (gn->D) {
133:       MatMultTranspose(gn->D,gn->y_work,gn->x_work);
134:     } else {
135:       VecCopy(gn->y_work,gn->x_work);
136:     }
137:     VecAXPY(G,gn->lambda,gn->x_work);
138:     break;
139:   }
140:   return 0;
141: }

143: static PetscErrorCode GNComputeHessian(Tao tao,Vec X,Mat H,Mat Hpre,void *ptr)
144: {
145:   TAO_BRGN       *gn = (TAO_BRGN *)ptr;
146:   PetscInt       i,n,cstart,cend;
147:   PetscScalar    *cnorms,*diag_ary;

149:   TaoComputeResidualJacobian(tao,X,tao->ls_jac,tao->ls_jac_pre);
150:   if (gn->mat_explicit) {
151:     MatTransposeMatMult(tao->ls_jac, tao->ls_jac, MAT_REUSE_MATRIX, PETSC_DEFAULT, &gn->H);
152:   }

154:   switch (gn->reg_type) {
155:   case BRGN_REGULARIZATION_USER:
156:     (*gn->regularizerhessian)(tao,X,gn->Hreg,gn->reg_hess_ctx);
157:     if (gn->mat_explicit) {
158:       MatAXPY(gn->H, 1.0, gn->Hreg, DIFFERENT_NONZERO_PATTERN);
159:     }
160:     break;
161:   case BRGN_REGULARIZATION_L2PURE:
162:     if (gn->mat_explicit) {
163:       MatShift(gn->H, gn->lambda);
164:     }
165:     break;
166:   case BRGN_REGULARIZATION_L2PROX:
167:     if (gn->mat_explicit) {
168:       MatShift(gn->H, gn->lambda);
169:     }
170:     break;
171:   case BRGN_REGULARIZATION_L1DICT:
 172:     /* calculate and store the diagonal matrix as a vector: diag = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3, where y = D*x */
173:     if (gn->D) {
174:       MatMult(gn->D,X,gn->y);/* y = D*x */
175:     } else {
176:       VecCopy(X,gn->y);
177:     }
178:     VecPointwiseMult(gn->y_work,gn->y,gn->y);
179:     VecShift(gn->y_work,gn->epsilon*gn->epsilon);
180:     VecCopy(gn->y_work,gn->diag);                  /* gn->diag = y.^2+epsilon^2 */
181:     VecSqrtAbs(gn->y_work);                        /* gn->y_work = sqrt(y.^2+epsilon^2) */
182:     VecPointwiseMult(gn->diag,gn->y_work,gn->diag);/* gn->diag = sqrt(y.^2+epsilon^2).^3 */
183:     VecReciprocal(gn->diag);
184:     VecScale(gn->diag,gn->epsilon*gn->epsilon);
185:     if (gn->mat_explicit) {
186:       MatDiagonalSet(gn->H, gn->diag, ADD_VALUES);
187:     }
188:     break;
189:   case BRGN_REGULARIZATION_LM:
190:     /* compute diagonal of J^T J */
191:     MatGetSize(gn->parent->ls_jac,NULL,&n);
192:     PetscMalloc1(n,&cnorms);
193:     MatGetColumnNorms(gn->parent->ls_jac,NORM_2,cnorms);
194:     MatGetOwnershipRangeColumn(gn->parent->ls_jac,&cstart,&cend);
195:     VecGetArray(gn->diag,&diag_ary);
196:     for (i = 0; i < cend-cstart; i++) {
197:       diag_ary[i] = cnorms[cstart+i] * cnorms[cstart+i];
198:     }
199:     VecRestoreArray(gn->diag,&diag_ary);
200:     PetscFree(cnorms);
201:     ComputeDamping(gn);
202:     if (gn->mat_explicit) {
203:       MatDiagonalSet(gn->H, gn->damping, ADD_VALUES);
204:     }
205:     break;
206:   }
207:   return 0;
208: }

210: static PetscErrorCode GNHookFunction(Tao tao,PetscInt iter, void *ctx)
211: {
212:   TAO_BRGN              *gn = (TAO_BRGN *)ctx;

214:   /* Update basic tao information from the subsolver */
215:   gn->parent->nfuncs = tao->nfuncs;
216:   gn->parent->ngrads = tao->ngrads;
217:   gn->parent->nfuncgrads = tao->nfuncgrads;
218:   gn->parent->nhess = tao->nhess;
219:   gn->parent->niter = tao->niter;
220:   gn->parent->ksp_its = tao->ksp_its;
221:   gn->parent->ksp_tot_its = tao->ksp_tot_its;
222:   gn->parent->fc = tao->fc;
223:   TaoGetConvergedReason(tao,&gn->parent->reason);
224:   /* Update the solution vectors */
225:   if (iter == 0) {
226:     VecSet(gn->x_old,0.0);
227:   } else {
228:     VecCopy(tao->solution,gn->x_old);
229:     VecCopy(tao->solution,gn->parent->solution);
230:   }
231:   /* Update the gradient */
232:   VecCopy(tao->gradient,gn->parent->gradient);

234:   /* Update damping parameter for LM */
235:   if (gn->reg_type == BRGN_REGULARIZATION_LM) {
236:     if (iter > 0) {
237:       if (gn->fc_old > tao->fc) {
238:         gn->lambda = gn->lambda * gn->downhill_lambda_change;
239:       } else {
240:         /* uphill step */
241:         gn->lambda = gn->lambda * gn->uphill_lambda_change;
242:       }
243:     }
244:     gn->fc_old = tao->fc;
245:   }

247:   /* Call general purpose update function */
248:   if (gn->parent->ops->update) {
249:     (*gn->parent->ops->update)(gn->parent,gn->parent->niter,gn->parent->user_update);
250:   }
251:   return 0;
252: }

254: static PetscErrorCode TaoSolve_BRGN(Tao tao)
255: {
256:   TAO_BRGN              *gn = (TAO_BRGN *)tao->data;

258:   TaoSolve(gn->subsolver);
259:   /* Update basic tao information from the subsolver */
260:   tao->nfuncs = gn->subsolver->nfuncs;
261:   tao->ngrads = gn->subsolver->ngrads;
262:   tao->nfuncgrads = gn->subsolver->nfuncgrads;
263:   tao->nhess = gn->subsolver->nhess;
264:   tao->niter = gn->subsolver->niter;
265:   tao->ksp_its = gn->subsolver->ksp_its;
266:   tao->ksp_tot_its = gn->subsolver->ksp_tot_its;
267:   TaoGetConvergedReason(gn->subsolver,&tao->reason);
268:   /* Update vectors */
269:   VecCopy(gn->subsolver->solution,tao->solution);
270:   VecCopy(gn->subsolver->gradient,tao->gradient);
271:   return 0;
272: }

274: static PetscErrorCode TaoSetFromOptions_BRGN(PetscOptionItems *PetscOptionsObject,Tao tao)
275: {
276:   TAO_BRGN              *gn = (TAO_BRGN *)tao->data;
277:   TaoLineSearch         ls;

 279:   PetscOptionsHead(PetscOptionsObject,"least-squares problems with regularizer: 0.5*||f(x)||^2 + lambda*g(x), g(x) = 0.5*||x-x_old||^2 or ||D*x||_1 or a user-defined function.");
280:   PetscOptionsBool("-tao_brgn_mat_explicit","switches the Hessian construction to be an explicit matrix rather than MATSHELL","",gn->mat_explicit,&gn->mat_explicit,NULL);
281:   PetscOptionsReal("-tao_brgn_regularizer_weight","regularizer weight (default 1e-4)","",gn->lambda,&gn->lambda,NULL);
282:   PetscOptionsReal("-tao_brgn_l1_smooth_epsilon","L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)","",gn->epsilon,&gn->epsilon,NULL);
 283:   PetscOptionsReal("-tao_brgn_lm_downhill_lambda_change","Factor by which the LM damping lambda is multiplied after a downhill step (default 0.2)","",gn->downhill_lambda_change,&gn->downhill_lambda_change,NULL);
 284:   PetscOptionsReal("-tao_brgn_lm_uphill_lambda_change","Factor by which the LM damping lambda is multiplied after an uphill step (default 1.5)","",gn->uphill_lambda_change,&gn->uphill_lambda_change,NULL);
285:   PetscOptionsEList("-tao_brgn_regularization_type","regularization type", "",BRGN_REGULARIZATION_TABLE,BRGN_REGULARIZATION_TYPES,BRGN_REGULARIZATION_TABLE[gn->reg_type],&gn->reg_type,NULL);
286:   PetscOptionsTail();
287:   /* set unit line search direction as the default when using the lm regularizer */
288:   if (gn->reg_type == BRGN_REGULARIZATION_LM) {
289:     TaoGetLineSearch(gn->subsolver,&ls);
290:     TaoLineSearchSetType(ls,TAOLINESEARCHUNIT);
291:   }
292:   TaoSetFromOptions(gn->subsolver);
293:   return 0;
294: }

296: static PetscErrorCode TaoView_BRGN(Tao tao,PetscViewer viewer)
297: {
298:   TAO_BRGN              *gn = (TAO_BRGN *)tao->data;

300:   PetscViewerASCIIPushTab(viewer);
301:   TaoView(gn->subsolver,viewer);
302:   PetscViewerASCIIPopTab(viewer);
303:   return 0;
304: }

306: static PetscErrorCode TaoSetUp_BRGN(Tao tao)
307: {
308:   TAO_BRGN              *gn = (TAO_BRGN *)tao->data;
309:   PetscBool             is_bnls,is_bntr,is_bntl;
310:   PetscInt              i,n,N,K; /* dict has size K*N*/

313:   PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNLS,&is_bnls);
314:   PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNTR,&is_bntr);
315:   PetscObjectTypeCompare((PetscObject)gn->subsolver,TAOBNTL,&is_bntl);
317:   if (!tao->gradient) {
318:     VecDuplicate(tao->solution,&tao->gradient);
319:   }
320:   if (!gn->x_work) {
321:     VecDuplicate(tao->solution,&gn->x_work);
322:   }
323:   if (!gn->r_work) {
324:     VecDuplicate(tao->ls_res,&gn->r_work);
325:   }
326:   if (!gn->x_old) {
327:     VecDuplicate(tao->solution,&gn->x_old);
328:     VecSet(gn->x_old,0.0);
329:   }

331:   if (BRGN_REGULARIZATION_L1DICT == gn->reg_type) {
332:     if (!gn->y) {
333:       if (gn->D) {
334:         MatGetSize(gn->D,&K,&N); /* Shell matrices still must have sizes defined. K = N for identity matrix, K=N-1 or N for gradient matrix */
335:         MatCreateVecs(gn->D,NULL,&gn->y);
336:       } else {
 337:         VecDuplicate(tao->solution,&gn->y); /* If the user does not set up a dictionary matrix, use the identity matrix, K=N */
338:       }
339:       VecSet(gn->y,0.0);
340:     }
341:     if (!gn->y_work) {
342:       VecDuplicate(gn->y,&gn->y_work);
343:     }
344:     if (!gn->diag) {
345:       VecDuplicate(gn->y,&gn->diag);
346:       VecSet(gn->diag,0.0);
347:     }
348:   }
349:   if (BRGN_REGULARIZATION_LM == gn->reg_type) {
350:     if (!gn->diag) {
351:       MatCreateVecs(tao->ls_jac,&gn->diag,NULL);
352:     }
353:     if (!gn->damping) {
354:       MatCreateVecs(tao->ls_jac,&gn->damping,NULL);
355:     }
356:   }

358:   if (!tao->setupcalled) {
359:     /* Hessian setup */
360:     if (gn->mat_explicit) {
361:       TaoComputeResidualJacobian(tao,tao->solution,tao->ls_jac,tao->ls_jac_pre);
362:       MatTransposeMatMult(tao->ls_jac, tao->ls_jac, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &gn->H);
363:     } else {
364:       VecGetLocalSize(tao->solution,&n);
365:       VecGetSize(tao->solution,&N);
366:       MatCreate(PetscObjectComm((PetscObject)tao),&gn->H);
367:       MatSetSizes(gn->H,n,n,N,N);
368:       MatSetType(gn->H,MATSHELL);
369:       MatSetOption(gn->H, MAT_SYMMETRIC, PETSC_TRUE);
370:       MatShellSetOperation(gn->H,MATOP_MULT,(void (*)(void))GNHessianProd);
371:       MatShellSetContext(gn->H,gn);
372:     }
373:     MatSetUp(gn->H);
 374:     /* Subsolver setup, including initial vector and dictionary D */
375:     TaoSetUpdate(gn->subsolver,GNHookFunction,gn);
376:     TaoSetSolution(gn->subsolver,tao->solution);
377:     if (tao->bounded) {
378:       TaoSetVariableBounds(gn->subsolver,tao->XL,tao->XU);
379:     }
380:     TaoSetResidualRoutine(gn->subsolver,tao->ls_res,tao->ops->computeresidual,tao->user_lsresP);
381:     TaoSetJacobianResidualRoutine(gn->subsolver,tao->ls_jac,tao->ls_jac,tao->ops->computeresidualjacobian,tao->user_lsjacP);
382:     TaoSetObjectiveAndGradient(gn->subsolver,NULL,GNObjectiveGradientEval,gn);
383:     TaoSetHessian(gn->subsolver,gn->H,gn->H,GNComputeHessian,gn);
384:     /* Propagate some options down */
385:     TaoSetTolerances(gn->subsolver,tao->gatol,tao->grtol,tao->gttol);
386:     TaoSetMaximumIterations(gn->subsolver,tao->max_it);
387:     TaoSetMaximumFunctionEvaluations(gn->subsolver,tao->max_funcs);
388:     for (i=0; i<tao->numbermonitors; ++i) {
389:       TaoSetMonitor(gn->subsolver,tao->monitor[i],tao->monitorcontext[i],tao->monitordestroy[i]);
390:       PetscObjectReference((PetscObject)(tao->monitorcontext[i]));
391:     }
392:     TaoSetUp(gn->subsolver);
393:   }
394:   return 0;
395: }

397: static PetscErrorCode TaoDestroy_BRGN(Tao tao)
398: {
399:   TAO_BRGN              *gn = (TAO_BRGN *)tao->data;

401:   if (tao->setupcalled) {
402:     VecDestroy(&tao->gradient);
403:     VecDestroy(&gn->x_work);
404:     VecDestroy(&gn->r_work);
405:     VecDestroy(&gn->x_old);
406:     VecDestroy(&gn->diag);
407:     VecDestroy(&gn->y);
408:     VecDestroy(&gn->y_work);
409:   }
410:   VecDestroy(&gn->damping);
411:   VecDestroy(&gn->diag);
412:   MatDestroy(&gn->H);
413:   MatDestroy(&gn->D);
414:   MatDestroy(&gn->Hreg);
415:   TaoDestroy(&gn->subsolver);
416:   gn->parent = NULL;
417:   PetscFree(tao->data);
418:   return 0;
419: }

421: /*MC
422:   TAOBRGN - Bounded Regularized Gauss-Newton method for solving nonlinear least-squares
 423:             problems with bound constraints. This algorithm is a thin wrapper around a bound-constrained
 424:             Newton subsolver (TAOBNLS by default) that constructs the Gauss-Newton problem with the
 425:             user-provided least-squares residual and Jacobian. The algorithm offers an L2-norm regularizer ("l2pure"),
 426:             an L2-norm proximal-point regularizer ("l2prox"), and an L1-norm dictionary regularizer ("l1dict"),
 427:             where the L1-norm ||x||_1 is approximated by sum_i(sqrt(x_i^2+epsilon^2)-epsilon) with a small positive epsilon.
 428:             Also offered is the "lm" regularizer, which uses a scaled diagonal of J^T J.
 429:             With the "lm" regularizer, BRGN is a Levenberg-Marquardt optimizer.
 430:             The user can also provide their own regularization function.

432:   Options Database Keys:
433: + -tao_brgn_regularization_type - regularization type ("user", "l2prox", "l2pure", "l1dict", "lm") (default "l2prox")
434: . -tao_brgn_regularizer_weight  - regularizer weight (default 1e-4)
435: - -tao_brgn_l1_smooth_epsilon   - L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)

437:   Level: beginner
438: M*/
439: PETSC_EXTERN PetscErrorCode TaoCreate_BRGN(Tao tao)
440: {
441:   TAO_BRGN       *gn;

443:   PetscNewLog(tao,&gn);

445:   tao->ops->destroy = TaoDestroy_BRGN;
446:   tao->ops->setup = TaoSetUp_BRGN;
447:   tao->ops->setfromoptions = TaoSetFromOptions_BRGN;
448:   tao->ops->view = TaoView_BRGN;
449:   tao->ops->solve = TaoSolve_BRGN;

451:   tao->data = gn;
452:   gn->reg_type = BRGN_REGULARIZATION_L2PROX;
453:   gn->lambda = 1e-4;
454:   gn->epsilon = 1e-6;
455:   gn->downhill_lambda_change = 1./5.;
456:   gn->uphill_lambda_change = 1.5;
457:   gn->parent = tao;

459:   TaoCreate(PetscObjectComm((PetscObject)tao),&gn->subsolver);
460:   TaoSetType(gn->subsolver,TAOBNLS);
461:   TaoSetOptionsPrefix(gn->subsolver,"tao_brgn_subsolver_");
462:   return 0;
463: }

465: /*@
466:   TaoBRGNGetSubsolver - Get the pointer to the subsolver inside BRGN

468:   Collective on Tao

470:   Level: advanced

 472:   Input Parameter:
 473: .  tao - the Tao solver context

  Output Parameter:
 474: .  subsolver - the Tao subsolver context
475: @*/
476: PetscErrorCode TaoBRGNGetSubsolver(Tao tao,Tao *subsolver)
477: {
478:   TAO_BRGN       *gn = (TAO_BRGN *)tao->data;

480:   *subsolver = gn->subsolver;
481:   return 0;
482: }

484: /*@
485:   TaoBRGNSetRegularizerWeight - Set the regularizer weight for the Gauss-Newton least-squares algorithm

487:   Collective on Tao

489:   Input Parameters:
490: +  tao - the Tao solver context
 491: -  lambda - the regularizer weight

493:   Level: beginner
494: @*/
495: PetscErrorCode TaoBRGNSetRegularizerWeight(Tao tao,PetscReal lambda)
496: {
497:   TAO_BRGN       *gn = (TAO_BRGN *)tao->data;

499:   /* Initialize lambda here */

501:   gn->lambda = lambda;
502:   return 0;
503: }

505: /*@
 506:   TaoBRGNSetL1SmoothEpsilon - Set the L1-norm smooth approximation parameter for the L1-regularized least-squares algorithm

508:   Collective on Tao

510:   Input Parameters:
511: +  tao - the Tao solver context
512: -  epsilon - L1-norm smooth approximation parameter

514:   Level: advanced
515: @*/
516: PetscErrorCode TaoBRGNSetL1SmoothEpsilon(Tao tao,PetscReal epsilon)
517: {
518:   TAO_BRGN       *gn = (TAO_BRGN *)tao->data;

520:   /* Initialize epsilon here */

522:   gn->epsilon = epsilon;
523:   return 0;
524: }

526: /*@
 527:    TaoBRGNSetDictionaryMatrix - Sets the dictionary matrix D used by the "l1dict" regularizer ||D*x||_1, e.g. for compressed-sensing least-squares problems

529:    Input Parameters:
530: +  tao  - the Tao context
 531: -  dict - the user-specified dictionary matrix; a NULL dictionary means the identity matrix is used by default
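
   Example (a sketch; userD is assumed to be an application-created K-by-N matrix):
.vb
   TaoBRGNSetDictionaryMatrix(tao,userD);  /* use ||userD*x||_1 in the "l1dict" regularizer */
   TaoBRGNSetDictionaryMatrix(tao,NULL);   /* NULL dictionary: identity matrix, i.e. plain ||x||_1 */
.ve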

533:     Level: advanced
534: @*/
535: PetscErrorCode TaoBRGNSetDictionaryMatrix(Tao tao,Mat dict)
536: {
537:   TAO_BRGN       *gn = (TAO_BRGN *)tao->data;
539:   if (dict) {
542:     PetscObjectReference((PetscObject)dict);
543:   }
544:   MatDestroy(&gn->D);
545:   gn->D = dict;
546:   return 0;
547: }

549: /*@C
 550:    TaoBRGNSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined callback that evaluates
 551:    the objective value and gradient of the regularizer.

553:    Input Parameters:
554: + tao - the Tao context
555: . func - function pointer for the regularizer value and gradient evaluation
556: - ctx - user context for the regularizer
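
   Calling sequence of func (matching the prototype below):
$    PetscErrorCode func(Tao tao,Vec x,PetscReal *f,Vec g,void *ctx);
+  tao - the Tao context
.  x   - the current solution
.  f   - the regularizer value at x (output)
.  g   - the regularizer gradient at x (output)
-  ctx - the user context set with this routine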

558:    Level: advanced
559: @*/
560: PetscErrorCode TaoBRGNSetRegularizerObjectiveAndGradientRoutine(Tao tao,PetscErrorCode (*func)(Tao,Vec,PetscReal *,Vec,void*),void *ctx)
561: {
562:   TAO_BRGN       *gn = (TAO_BRGN *)tao->data;

565:   if (ctx) {
566:     gn->reg_obj_ctx = ctx;
567:   }
568:   if (func) {
569:     gn->regularizerobjandgrad = func;
570:   }
571:   return 0;
572: }

574: /*@C
 575:    TaoBRGNSetRegularizerHessianRoutine - Sets the user-defined callback that evaluates
 576:    the Hessian of the regularizer.

578:    Input Parameters:
579: + tao - the Tao context
580: . Hreg - user-created matrix for the Hessian of the regularization term
581: . func - function pointer for the regularizer Hessian evaluation
582: - ctx - user context for the regularizer Hessian
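
   Calling sequence of func (matching the prototype below):
$    PetscErrorCode func(Tao tao,Vec x,Mat Hreg,void *ctx);
+  tao  - the Tao context
.  x    - the current solution
.  Hreg - the regularizer Hessian matrix at x (output)
-  ctx  - the user context set with this routine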

584:    Level: advanced
585: @*/
586: PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(Tao tao,Mat Hreg,PetscErrorCode (*func)(Tao,Vec,Mat,void*),void *ctx)
587: {
588:   TAO_BRGN       *gn = (TAO_BRGN *)tao->data;

 591:   if (!Hreg) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONG,"NULL Hessian detected! User must provide valid Hessian for the regularizer.");
595:   if (ctx) {
596:     gn->reg_hess_ctx = ctx;
597:   }
598:   if (func) {
599:     gn->regularizerhessian = func;
600:   }
601:   if (Hreg) {
602:     PetscObjectReference((PetscObject)Hreg);
603:     MatDestroy(&gn->Hreg);
604:     gn->Hreg = Hreg;
605:   }
606:   return 0;
607: }