Actual source code: admm.c

  1: #include <../src/tao/constrained/impls/admm/admm.h>
  2: #include <petsctao.h>
  3: #include <petsc/private/petscimpl.h>

  5: /* Updates terminating criteria
  6:  *
  7:  * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
  8:  *
  9:  * 2. Updates dual residual, d_k
 10:  *
 11:  * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty||   */

 13: static PetscBool cited = PETSC_FALSE;
 14: static const char citation[] =
 15:   "@misc{xu2017adaptive,\n"
 16:   "   title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
 17:   "   author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
 18:   "   year={2017},\n"
 19:   "   eprint={1704.02712},\n"
 20:   "   archivePrefix={arXiv},\n"
 21:   "   primaryClass={cs.CV}\n"
 22:   "}  \n";

 24: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
 25: {
 26:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
 28:   PetscReal      Axnorm,Bznorm,ATynorm,temp;
 29:   Vec            tempJR,tempL;
 30:   Tao            mis;

 33:   mis    = am->subsolverX;
 34:   tempJR = am->workJacobianRight;
 35:   tempL  = am->workLeft;
 36:   /* ATy */
 37:   TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre);
 38:   MatMultTranspose(mis->jacobian_equality,am->y,tempJR);
 39:   VecNorm(tempJR,NORM_2,&ATynorm);
 40:   /* dualres = mu * ||AT(Bz-Bzold)||_2 */
 41:   VecWAXPY(tempJR,-1.,am->Bzold,am->Bz);
 42:   MatMultTranspose(mis->jacobian_equality,tempJR,tempL);
 43:   VecNorm(tempL,NORM_2,&am->dualres);
 44:   am->dualres *= am->mu;

 46:   /* ||Ax||_2, ||Bz||_2 */
 47:   VecNorm(am->Ax,NORM_2,&Axnorm);
 48:   VecNorm(am->Bz,NORM_2,&Bznorm);

 50:   /* Set catol to be catol_admm *  max{||Ax||,||Bz||,||c||} *
 51:    * Set gatol to be gatol_admm *  ||A^Ty|| *
 52:    * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
 53:   temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm,am->const_norm));
 54:   TaoSetConstraintTolerances(tao,temp,PETSC_DEFAULT);
 55:   TaoSetTolerances(tao, am->gatol_admm*ATynorm, PETSC_DEFAULT,PETSC_DEFAULT);
 56:   return(0);
 57: }
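
A minimal sketch, not part of admm.c, of what the rescaled tolerances set above amount to in Tao's default convergence test; the helper name and argument names are informal placeholders.

static PetscBool ADMMConvergedSketch(PetscReal resnorm, PetscReal dualres, PetscReal Axnorm, PetscReal Bznorm, PetscReal cnorm, PetscReal ATynorm, PetscReal catol_admm, PetscReal gatol_admm)
{
  /* primal: ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||} */
  PetscBool primal_ok = (PetscBool)(resnorm <= catol_admm*PetscMax(Axnorm,PetscMax(Bznorm,cnorm)));
  /* dual:   ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^T y|| */
  PetscBool dual_ok   = (PetscBool)(dualres <= gatol_admm*ATynorm);
  return (PetscBool)(primal_ok && dual_ok);
}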

 59: /* Penalty update for Adaptive ADMM. */
 60: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
 61: {
 62:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
 64:   PetscReal      ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
 65:   PetscBool      hflag, gflag;
 66:   Vec            tempJR,tempJR2;

 69:   tempJR  = am->workJacobianRight;
 70:   tempJR2 = am->workJacobianRight2;
 71:   hflag   = PETSC_FALSE;
 72:   gflag   = PETSC_FALSE;
 73:   a_k     = -1;
 74:   b_k     = -1;

 76:   VecWAXPY(tempJR,-1.,am->Axold,am->Ax);
 77:   VecWAXPY(tempJR2,-1.,am->yhatold,am->yhat);
 78:   VecNorm(tempJR,NORM_2,&Axdiff_norm);
 79:   VecNorm(tempJR2,NORM_2,&yhatdiff_norm);
 80:   VecDot(tempJR,tempJR2,&Axyhat);

 82:   VecWAXPY(tempJR,-1.,am->Bz0,am->Bz);
 83:   VecWAXPY(tempJR2,-1.,am->y,am->y0);
 84:   VecNorm(tempJR,NORM_2,&Bzdiff_norm);
 85:   VecNorm(tempJR2,NORM_2,&ydiff_norm);
 86:   VecDot(tempJR,tempJR2,&Bzy);

 88:   if (Axyhat > am->orthval*Axdiff_norm*yhatdiff_norm + am->mueps) {
 89:     hflag = PETSC_TRUE;
 90:     a_sd  = PetscSqr(yhatdiff_norm)/Axyhat; /* alphaSD */
 91:     a_mg  = Axyhat/PetscSqr(Axdiff_norm);   /* alphaMG */
 92:     a_k   = (a_mg/a_sd) > 0.5 ? a_mg : a_sd - 0.5*a_mg;
 93:   }
 94:   if (Bzy > am->orthval*Bzdiff_norm*ydiff_norm + am->mueps) {
 95:     gflag = PETSC_TRUE;
 96:     b_sd  = PetscSqr(ydiff_norm)/Bzy;  /* betaSD */
 97:     b_mg  = Bzy/PetscSqr(Bzdiff_norm); /* betaMG */
 98:     b_k   = (b_mg/b_sd) > 0.5 ? b_mg : b_sd - 0.5*b_mg;
 99:   }
100:   am->muold = am->mu;
101:   if (gflag && hflag) {
102:     am->mu = PetscSqrtReal(a_k*b_k);
103:   } else if (hflag) {
104:     am->mu = a_k;
105:   } else if (gflag) {
106:     am->mu = b_k;
107:   }
108:   if (am->mu > am->muold) {
109:     am->mu = am->muold;
110:   }
111:   if (am->mu < am->mumin) {
112:     am->mu = am->mumin;
113:   }
114:   return(0);
115: }
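
The safeguarded hybrid spectral stepsize used above reads more easily on plain scalars. The sketch below is illustrative only (not part of admm.c) and assumes the curvature test against orthval and mueps has already passed, so the dot product is safely positive.

static PetscReal HybridSpectralStepsizeSketch(PetscReal Axdiff_norm, PetscReal yhatdiff_norm, PetscReal Axyhat)
{
  PetscReal a_sd = PetscSqr(yhatdiff_norm)/Axyhat; /* steepest-descent estimate (alphaSD) */
  PetscReal a_mg = Axyhat/PetscSqr(Axdiff_norm);   /* minimum-gradient estimate (alphaMG) */
  /* hybrid rule used above: take alphaMG when it exceeds half of alphaSD, else the safeguarded combination */
  return (a_mg/a_sd > 0.5) ? a_mg : a_sd - 0.5*a_mg;
}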

117: static PetscErrorCode  TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
118: {
119:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

122:   am->regswitch = type;
123:   return(0);
124: }

126: static PetscErrorCode  TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
127: {
128:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

131:   *type = am->regswitch;
132:   return(0);
133: }

135: static PetscErrorCode  TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
136: {
137:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

140:   am->update = type;
141:   return(0);
142: }

144: static PetscErrorCode  TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
145: {
146:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

149:   *type = am->update;
150:   return(0);
151: }

153: /* This routine updates Jacobians with new x,z vectors,
 154:  * and then updates the Ax and Bz vectors, and computes the updated residual vector */
155: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
156: {
157:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
158:   Tao            mis,reg;

162:   mis  = am->subsolverX;
163:   reg  = am->subsolverZ;
164:   TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre);
165:   MatMult(mis->jacobian_equality,x,Ax);
166:   TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre);
167:   MatMult(reg->jacobian_equality,z,Bz);

169:   VecWAXPY(residual,1.,Bz,Ax);
170:   if (am->constraint != NULL) {
171:     VecAXPY(residual,-1.,am->constraint);
172:   }
173:   return(0);
174: }

 176: /* Adds the augmented Lagrangian terms to the user-given routine.
 177:  * For subsolverX, the routine needs to be ComputeObjectiveAndGradient;
 178:  * separate Objective and Gradient routines are not supported.  */
179: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
180: {
181:   Tao            parent = (Tao)ptr;
182:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;
184:   PetscReal      temp,temp2;
185:   Vec            tempJR;

188:   tempJR = am->workJacobianRight;
189:   ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
190:   (*am->ops->misfitobjgrad)(am->subsolverX,x,f,g,am->misfitobjgradP);

192:   am->last_misfit_val = *f;
 193:   /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
194:   VecTDot(am->residual,am->y,&temp);
195:   VecTDot(am->residual,am->residual,&temp2);
196:   *f   += temp + (am->mu/2)*temp2;

 198:   /* Gradient: add mu*A^T(Ax+Bz-c) + A^T y */
199:   MatMultTranspose(tao->jacobian_equality,am->residual,tempJR);
200:   VecAXPY(g,am->mu,tempJR);
201:   MatMultTranspose(tao->jacobian_equality,am->y,tempJR);
202:   VecAXPY(g,1.,tempJR);
203:   return(0);
204: }

 206: /* Adds the augmented Lagrangian terms to the user-given routine.
 207:  * For subsolverZ, the routine needs to be ComputeObjectiveAndGradient;
 208:  * separate Objective and Gradient routines are not supported.  */
209: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
210: {
211:   Tao            parent = (Tao)ptr;
212:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;
214:   PetscReal      temp,temp2;
215:   Vec            tempJR;

218:   tempJR = am->workJacobianRight;
219:   ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual);
220:   (*am->ops->regobjgrad)(am->subsolverZ,z,f,g,am->regobjgradP);
221:   am->last_reg_val= *f;
 222:   /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
223:   VecTDot(am->residual,am->y,&temp);
224:   VecTDot(am->residual,am->residual,&temp2);
225:   *f   += temp + (am->mu/2)*temp2;

 227:   /* Gradient: add mu*B^T(Ax+Bz-c) + B^T y */
228:   MatMultTranspose(am->subsolverZ->jacobian_equality,am->residual,tempJR);
229:   VecAXPY(g,am->mu,tempJR);
230:   MatMultTranspose(am->subsolverZ->jacobian_equality,am->y,tempJR);
231:   VecAXPY(g,1.,tempJR);
232:   return(0);
233: }

 235: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
236: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
237: {
238:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
239:   PetscInt       N;

243:   VecGetSize(am->workLeft,&N);
244:   VecPointwiseMult(am->workLeft,x,x);
245:   VecShift(am->workLeft,am->l1epsilon*am->l1epsilon);
246:   VecSqrtAbs(am->workLeft);
247:   VecSum(am->workLeft,norm);
248:   *norm += N*am->l1epsilon;
249:   *norm *= am->lambda;
250:   return(0);
251: }
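
For clarity, the same quantity on a plain C array; this sketch simply mirrors the Vec operations above (including the N*l1epsilon shift as coded) and is illustrative only, not part of admm.c.

static PetscReal L1EpsilonNormSketch(const PetscReal *x, PetscInt N, PetscReal eps, PetscReal lambda)
{
  PetscReal sum = 0.0;
  PetscInt  i;

  for (i = 0; i < N; i++) sum += PetscSqrtReal(PetscSqr(x[i]) + PetscSqr(eps)); /* VecPointwiseMult/VecShift/VecSqrtAbs/VecSum */
  sum += N*eps;       /* mirrors the N*l1epsilon shift above */
  return lambda*sum;  /* scale by the regularizer weight */
}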

253: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
254: {
255:   TAO_ADMM       *am = (TAO_ADMM*)ptr;

259:   switch (am->update) {
260:   case (TAO_ADMM_UPDATE_BASIC):
261:     break;
262:   case (TAO_ADMM_UPDATE_ADAPTIVE):
263:   case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
264:     if (H && (am->muold != am->mu)) {
265:       if (!Identity) {
266:         MatAXPY(H,am->mu-am->muold,Constraint,DIFFERENT_NONZERO_PATTERN);
267:       } else {
268:         MatShift(H,am->mu-am->muold);
269:       }
270:     }
271:     break;
272:   }
273:   return(0);
274: }

276: /* Updates Hessian - adds second derivative of augmented Lagrangian
277:  * H \gets H + \rho*ATA
278:  * Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus no-op
 279:  * For ADAPTIVE and ADAPTIVE_RELAXED,
 280:  * H \gets H + (\rho-\rhoold)*ATA
 281:  * Here, we assume that A is a linear constraint, i.e., it doesn't change.
 282:  * Thus, for both ADAPTIVE and RELAXED, the ATA matrix is pre-set (except for A=I, the NULL case); see TaoSetUp_ADMM */
283: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
284: {
285:   Tao            parent = (Tao)ptr;
286:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;

290:   if (am->Hxchange) {
291:     /* Case where Hessian gets updated with respect to x vector input. */
292:     (*am->ops->misfithess)(am->subsolverX,x,H,Hpre,am->misfithessP);
293:     ADMMInternalHessianUpdate(am->subsolverX->hessian,am->ATA,am->xJI,am);
294:   } else if (am->Hxbool) {
 295:     /* Hessian doesn't get updated. H(x) = c */
 296:     /* Update the augmented Lagrangian term only once per TaoSolve call */
297:     ADMMInternalHessianUpdate(am->subsolverX->hessian,am->ATA,am->xJI,am);
298:     am->Hxbool = PETSC_FALSE;
299:   }
300:   return(0);
301: }

303: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
304: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
305: {
306:   Tao            parent = (Tao)ptr;
307:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;


312:   if (am->Hzchange) {
 313:     /* Case where Hessian gets updated with respect to z vector input. */
314:     (*am->ops->reghess)(am->subsolverZ,z,H,Hpre,am->reghessP);
315:     ADMMInternalHessianUpdate(am->subsolverZ->hessian,am->BTB,am->zJI,am);
316:   } else if (am->Hzbool) {
 317:     /* Hessian doesn't get updated. H(z) = c */
 318:     /* Update the augmented Lagrangian term only once per TaoSolve call */
319:     ADMMInternalHessianUpdate(am->subsolverZ->hessian,am->BTB,am->zJI,am);
320:     am->Hzbool = PETSC_FALSE;
321:   }
322:   return(0);
323: }

325: /* Shell Matrix routine for A matrix.
 326:  * This gets used when the user passes NULL for
 327:  * TaoADMMSetMisfitConstraintJacobian(tao, NULL,NULL, ...)
 328:  * Essentially sets A = I */
329: static PetscErrorCode JacobianIdentity(Mat mat,Vec in,Vec out)
330: {

334:   VecCopy(in,out);
335:   return(0);
336: }

338: /* Shell Matrix routine for B matrix.
 339:  * This gets used when the user passes NULL for
340:  * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
341:  * Sets B=-I */
342: static PetscErrorCode JacobianIdentityB(Mat mat,Vec in,Vec out)
343: {

347:   VecCopy(in,out);
348:   VecScale(out,-1.);
349:   return(0);
350: }

352: /* Solve f(x) + g(z) s.t. Ax + Bz = c */
353: static PetscErrorCode TaoSolve_ADMM(Tao tao)
354: {
355:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
357:   PetscInt       N;
358:   PetscReal      reg_func;
359:   PetscBool      is_reg_shell;
360:   Vec            tempL;

363:   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
364:     if (!am->subsolverX->ops->computejacobianequality) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoADMMSetMisfitConstraintJacobian() first");
365:     if (!am->subsolverZ->ops->computejacobianequality) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoADMMSetRegularizerConstraintJacobian() first");
366:     if (am->constraint != NULL) {
367:       VecNorm(am->constraint,NORM_2,&am->const_norm);
368:     }
369:   }
370:   tempL = am->workLeft;
371:   VecGetSize(tempL,&N);

373:   if (am->Hx && am->ops->misfithess) {
374:     TaoSetHessianRoutine(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao);
375:   }

377:   if (!am->zJI) {
 378:     /* Currently, B is assumed to be linear, i.e., it does not get updated */
379:     MatTransposeMatMult(am->JB,am->JB,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&(am->BTB));
380:   }
381:   if (!am->xJI) {
 382:     /* Currently, A is assumed to be linear, i.e., it does not get updated */
383:     MatTransposeMatMult(am->subsolverX->jacobian_equality,am->subsolverX->jacobian_equality,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&(am->ATA));
384:   }

386:   is_reg_shell = PETSC_FALSE;

388:   PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell);

390:   if (!is_reg_shell) {
391:     switch (am->regswitch) {
392:     case (TAO_ADMM_REGULARIZER_USER):
393:       if (!am->ops->regobjgrad) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoADMMSetRegularizerObjectiveAndGradientRoutine() first if one wishes to use TAO_ADMM_REGULARIZER_USER with non-TAOSHELL type");
394:       break;
395:     case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
396:       /* Soft Threshold. */
397:       break;
398:     }
399:     if (am->ops->regobjgrad) {
400:       TaoSetObjectiveAndGradientRoutine(am->subsolverZ, RegObjGradUpdate, tao);
401:     }
402:     if (am->Hz && am->ops->reghess) {
403:       TaoSetHessianRoutine(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao);
404:     }
405:   }

407:   switch (am->update) {
408:   case TAO_ADMM_UPDATE_BASIC:
409:     if (am->subsolverX->hessian) {
 410:       /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty.
 411:        * Here, when A is set (i.e., !am->xJI), add mu*ATA to the Hessian; otherwise shift by mu */
412:       if (!am->xJI) {
413:         MatAXPY(am->subsolverX->hessian,am->mu,am->ATA,DIFFERENT_NONZERO_PATTERN);
414:       } else {
415:         MatShift(am->subsolverX->hessian,am->mu);
416:       }
417:     }
418:     if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
419:       if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
420:         MatAXPY(am->subsolverZ->hessian,am->mu,am->BTB,DIFFERENT_NONZERO_PATTERN);
421:       } else {
422:         MatShift(am->subsolverZ->hessian,am->mu);
423:       }
424:     }
425:     break;
426:   case TAO_ADMM_UPDATE_ADAPTIVE:
427:   case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
428:     break;
429:   }

431:   PetscCitationsRegister(citation,&cited);
432:   tao->reason = TAO_CONTINUE_ITERATING;

434:   while (tao->reason == TAO_CONTINUE_ITERATING) {
435:     if (tao->ops->update) {
436:       (*tao->ops->update)(tao, tao->niter, tao->user_update);
437:     }
438:     VecCopy(am->Bz, am->Bzold);

440:     /* x update */
441:     TaoSolve(am->subsolverX);
442:     TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre);
443:     MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution,am->Ax);

445:     am->Hxbool = PETSC_TRUE;

447:     /* z update */
448:     switch (am->regswitch) {
449:     case TAO_ADMM_REGULARIZER_USER:
450:       TaoSolve(am->subsolverZ);
451:       break;
452:     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
 453:       /* L1 assumes the A,B Jacobians are the n x n identity matrix */
454:       VecWAXPY(am->workJacobianRight,1/am->mu,am->y,am->Ax);
455:       TaoSoftThreshold(am->workJacobianRight,-am->lambda/am->mu,am->lambda/am->mu,am->subsolverZ->solution);
456:       break;
457:     }
458:     am->Hzbool = PETSC_TRUE;
459:     /* Returns Ax + Bz - c with updated Ax,Bz vectors */
460:     ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
 461:     /* Dual variable update: y = yold + mu*(Ax+Bz-c) */
462:     VecWAXPY(am->y, am->mu, am->residual, am->yold);

464:     /* stopping tolerance update */
465:     TaoADMMToleranceUpdate(tao);

467:     /* Updating Spectral Penalty */
468:     switch (am->update) {
469:     case TAO_ADMM_UPDATE_BASIC:
470:       am->muold = am->mu;
471:       break;
472:     case TAO_ADMM_UPDATE_ADAPTIVE:
473:     case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
474:       if (tao->niter == 0) {
475:         VecCopy(am->y, am->y0);
476:         VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
477:         if (am->constraint) {
478:           VecAXPY(am->residual, -1., am->constraint);
479:         }
480:         VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold);
481:         VecCopy(am->Ax, am->Axold);
482:         VecCopy(am->Bz, am->Bz0);
483:         am->muold = am->mu;
484:       } else if (tao->niter % am->T == 1) {
 485:         /* we computed Bzold in a previous iteration, and Ax above */
486:         VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
487:         if (am->constraint) {
488:           VecAXPY(am->residual, -1., am->constraint);
489:         }
490:         VecWAXPY(am->yhat, -am->mu, am->residual, am->yold);
491:         AdaptiveADMMPenaltyUpdate(tao);
492:         VecCopy(am->Ax, am->Axold);
493:         VecCopy(am->Bz, am->Bz0);
494:         VecCopy(am->yhat, am->yhatold);
495:         VecCopy(am->y, am->y0);
496:       } else {
497:         am->muold = am->mu;
498:       }
499:       break;
500:     default:
501:       break;
502:     }
503:     tao->niter++;

 505:     /* Calculate original function values. The misfit part was computed in SubObjGradUpdate */
506:     switch (am->regswitch) {
507:     case TAO_ADMM_REGULARIZER_USER:
508:       if (is_reg_shell) {
509:         ADMML1EpsilonNorm(tao,am->subsolverZ->solution,am->l1epsilon,&reg_func);
510:       } else {
511:         (*am->ops->regobjgrad)(am->subsolverZ,am->subsolverX->solution,&reg_func,tempL,am->regobjgradP);
512:       }
513:       break;
514:     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
515:       ADMML1EpsilonNorm(tao,am->subsolverZ->solution,am->l1epsilon,&reg_func);
516:       break;
517:     }
518:     VecCopy(am->y,am->yold);
519:     ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
520:     VecNorm(am->residual,NORM_2,&am->resnorm);
521:     TaoLogConvergenceHistory(tao,am->last_misfit_val + reg_func,am->dualres,am->resnorm,tao->ksp_its);

523:     TaoMonitor(tao,tao->niter,am->last_misfit_val + reg_func,am->dualres,am->resnorm,1.0);
524:     (*tao->ops->convergencetest)(tao,tao->cnvP);
525:   }
526:   /* Update vectors */
527:   VecCopy(am->subsolverX->solution,tao->solution);
528:   VecCopy(am->subsolverX->gradient,tao->gradient);
529:   PetscObjectCompose((PetscObject)am->subsolverX,"TaoGetADMMParentTao_ADMM", NULL);
530:   PetscObjectCompose((PetscObject)am->subsolverZ,"TaoGetADMMParentTao_ADMM", NULL);
531:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetRegularizerType_C",NULL);
532:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetRegularizerType_C",NULL);
533:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetUpdateType_C",NULL);
534:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetUpdateType_C",NULL);
535:   return(0);
536: }

538: static PetscErrorCode TaoSetFromOptions_ADMM(PetscOptionItems *PetscOptionsObject,Tao tao)
539: {
540:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

544:   PetscOptionsHead(PetscOptionsObject,"ADMM problem that solves f(x) in a form of f(x) + g(z) subject to x - z = 0. Norm 1 and 2 are supported. Different subsolver routines can be selected. ");
545:   PetscOptionsReal("-tao_admm_regularizer_coefficient","regularizer constant","",am->lambda,&am->lambda,NULL);
546:   PetscOptionsReal("-tao_admm_spectral_penalty","Constant for Augmented Lagrangian term.","",am->mu,&am->mu,NULL);
547:   PetscOptionsReal("-tao_admm_relaxation_parameter","x relaxation parameter for Z update.","",am->gamma,&am->gamma,NULL);
548:   PetscOptionsReal("-tao_admm_tolerance_update_factor","ADMM dynamic tolerance update factor.","",am->tol,&am->tol,NULL);
549:   PetscOptionsReal("-tao_admm_spectral_penalty_update_factor","ADMM spectral penalty update curvature safeguard value.","",am->orthval,&am->orthval,NULL);
550:   PetscOptionsReal("-tao_admm_minimum_spectral_penalty","Set ADMM minimum spectral penalty.","",am->mumin,&am->mumin,NULL);
551:   PetscOptionsEnum("-tao_admm_dual_update","Lagrangian dual update policy","TaoADMMUpdateType",
552:                           TaoADMMUpdateTypes,(PetscEnum)am->update,(PetscEnum*)&am->update,NULL);
553:   PetscOptionsEnum("-tao_admm_regularizer_type","ADMM regularizer update rule","TaoADMMRegularizerType",
554:                           TaoADMMRegularizerTypes,(PetscEnum)am->regswitch,(PetscEnum*)&am->regswitch,NULL);
555:   PetscOptionsTail();
556:   TaoSetFromOptions(am->subsolverX);
557:   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
558:     TaoSetFromOptions(am->subsolverZ);
559:   }
560:   return(0);
561: }

563: static PetscErrorCode TaoView_ADMM(Tao tao,PetscViewer viewer)
564: {
565:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

569:   PetscViewerASCIIPushTab(viewer);
570:   TaoView(am->subsolverX,viewer);
571:   TaoView(am->subsolverZ,viewer);
572:   PetscViewerASCIIPopTab(viewer);
573:   return(0);
574: }

576: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
577: {
578:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
580:   PetscInt       n,N,M;

583:   VecGetLocalSize(tao->solution,&n);
584:   VecGetSize(tao->solution,&N);
 585:   /* If the Jacobian is given as NULL, it is taken to be the identity matrix with the size of the solution vector */
586:   if (!am->JB) {
587:     am->zJI   = PETSC_TRUE;
588:     MatCreateShell(PetscObjectComm((PetscObject)tao),n,n,PETSC_DETERMINE,PETSC_DETERMINE,NULL,&am->JB);
589:     MatShellSetOperation(am->JB,MATOP_MULT,(void (*)(void))JacobianIdentityB);
590:     MatShellSetOperation(am->JB,MATOP_MULT_TRANSPOSE,(void (*)(void))JacobianIdentityB);
591:     MatShellSetOperation(am->JB,MATOP_TRANSPOSE_MAT_MULT,(void (*)(void))JacobianIdentityB);
592:     am->JBpre = am->JB;
593:   }
594:   if (!am->JA) {
595:     am->xJI   = PETSC_TRUE;
596:     MatCreateShell(PetscObjectComm((PetscObject)tao),n,n,PETSC_DETERMINE,PETSC_DETERMINE,NULL,&am->JA);
597:     MatShellSetOperation(am->JA,MATOP_MULT,(void (*)(void))JacobianIdentity);
598:     MatShellSetOperation(am->JA,MATOP_MULT_TRANSPOSE,(void (*)(void))JacobianIdentity);
599:     MatShellSetOperation(am->JA,MATOP_TRANSPOSE_MAT_MULT,(void (*)(void))JacobianIdentity);
600:     am->JApre = am->JA;
601:   }
602:   MatCreateVecs(am->JA,NULL,&am->Ax);
603:   if (!tao->gradient) {
604:     VecDuplicate(tao->solution,&tao->gradient);
605:   }
606:   TaoSetInitialVector(am->subsolverX, tao->solution);
607:   if (!am->z) {
608:     VecDuplicate(tao->solution,&am->z);
609:     VecSet(am->z,0.0);
610:   }
611:   TaoSetInitialVector(am->subsolverZ, am->z);
612:   if (!am->workLeft) {
613:     VecDuplicate(tao->solution,&am->workLeft);
614:   }
615:   if (!am->Axold) {
616:     VecDuplicate(am->Ax,&am->Axold);
617:   }
618:   if (!am->workJacobianRight) {
619:     VecDuplicate(am->Ax,&am->workJacobianRight);
620:   }
621:   if (!am->workJacobianRight2) {
622:     VecDuplicate(am->Ax,&am->workJacobianRight2);
623:   }
624:   if (!am->Bz) {
625:     VecDuplicate(am->Ax,&am->Bz);
626:   }
627:   if (!am->Bzold) {
628:     VecDuplicate(am->Ax,&am->Bzold);
629:   }
630:   if (!am->Bz0) {
631:     VecDuplicate(am->Ax,&am->Bz0);
632:   }
633:   if (!am->y) {
634:     VecDuplicate(am->Ax,&am->y);
635:     VecSet(am->y,0.0);
636:   }
637:   if (!am->yold) {
638:     VecDuplicate(am->Ax,&am->yold);
639:     VecSet(am->yold,0.0);
640:   }
641:   if (!am->y0) {
642:     VecDuplicate(am->Ax,&am->y0);
643:     VecSet(am->y0,0.0);
644:   }
645:   if (!am->yhat) {
646:     VecDuplicate(am->Ax,&am->yhat);
647:     VecSet(am->yhat,0.0);
648:   }
649:   if (!am->yhatold) {
650:     VecDuplicate(am->Ax,&am->yhatold);
651:     VecSet(am->yhatold,0.0);
652:   }
653:   if (!am->residual) {
654:     VecDuplicate(am->Ax,&am->residual);
655:     VecSet(am->residual,0.0);
656:   }
657:   if (!am->constraint) {
658:     am->constraint = NULL;
659:   } else {
660:     VecGetSize(am->constraint,&M);
661:     if (M != N) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Solution vector and constraint vector must be of same size!");
662:   }

664:   /* Save changed tao tolerance for adaptive tolerance */
665:   if (tao->gatol_changed) {
666:     am->gatol_admm = tao->gatol;
667:   }
668:   if (tao->catol_changed) {
669:     am->catol_admm = tao->catol;
670:   }

 672:   /* Update spectral and dual elements for the X subsolver */
673:   TaoSetObjectiveAndGradientRoutine(am->subsolverX, SubObjGradUpdate, tao);
674:   TaoSetJacobianEqualityRoutine(am->subsolverX,am->JA,am->JApre, am->ops->misfitjac, am->misfitjacobianP);
675:   TaoSetJacobianEqualityRoutine(am->subsolverZ,am->JB,am->JBpre, am->ops->regjac, am->regjacobianP);
676:   return(0);
677: }

679: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
680: {
681:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

685:   VecDestroy(&am->z);
686:   VecDestroy(&am->Ax);
687:   VecDestroy(&am->Axold);
688:   VecDestroy(&am->Bz);
689:   VecDestroy(&am->Bzold);
690:   VecDestroy(&am->Bz0);
691:   VecDestroy(&am->residual);
692:   VecDestroy(&am->y);
693:   VecDestroy(&am->yold);
694:   VecDestroy(&am->y0);
695:   VecDestroy(&am->yhat);
696:   VecDestroy(&am->yhatold);
697:   VecDestroy(&am->workLeft);
698:   VecDestroy(&am->workJacobianRight);
699:   VecDestroy(&am->workJacobianRight2);

701:   MatDestroy(&am->JA);
702:   MatDestroy(&am->JB);
703:   if (!am->xJI) {
704:     MatDestroy(&am->JApre);
705:   }
706:   if (!am->zJI) {
707:     MatDestroy(&am->JBpre);
708:   }
709:   if (am->Hx) {
710:     MatDestroy(&am->Hx);
711:     MatDestroy(&am->Hxpre);
712:   }
713:   if (am->Hz) {
714:     MatDestroy(&am->Hz);
715:     MatDestroy(&am->Hzpre);
716:   }
717:   MatDestroy(&am->ATA);
718:   MatDestroy(&am->BTB);
719:   TaoDestroy(&am->subsolverX);
720:   TaoDestroy(&am->subsolverZ);
721:   am->parent = NULL;
722:   PetscFree(tao->data);
723:   return(0);
724: }

726: /*MC

 728:   TAOADMM - Alternating direction method of multipliers for solving problems with linear
 729:             constraints, of the form min_x f(x) + g(z)  s.t. Ax+Bz=c.
 730:             The algorithm employs two sub Tao solvers, whose types can be specified
 731:             by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a Hessian routine, for both subsolvers.
 732:             Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
 733:             TaoADMMSet{Misfit,Regularizer}HessianChangeStatus.
 734:             The second subsolver supports TAOSHELL. Note that the L1-norm is used for the objective value with the TAOSHELL type.
 735:             A regularizer type can also be selected; currently soft thresholding is implemented. For the spectral penalty update,
 736:             basic and adaptive options are currently available.
 737:             The constraint is Ax+Bz=c, and A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian.
 738:             c can be set with TaoADMMSetConstraintVectorRHS.
 739:             The user can also provide a regularizer weight for the second subsolver.

741:   References:
742: .    1. - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom
743:           "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation"
744:           The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017.

746:   Options Database Keys:
747: + -tao_admm_regularizer_coefficient        - regularizer constant (default 1.e-6)
748: . -tao_admm_spectral_penalty               - Constant for Augmented Lagrangian term (default 1.)
749: . -tao_admm_relaxation_parameter           - relaxation parameter for Z update (default 1.)
750: . -tao_admm_tolerance_update_factor        - ADMM dynamic tolerance update factor (default 1.e-12)
751: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
752: . -tao_admm_minimum_spectral_penalty       - Set ADMM minimum spectral penalty (default 0)
753: . -tao_admm_dual_update                    - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
754: - -tao_admm_regularizer_type               - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")

756:   Level: beginner

758: .seealso: TaoADMMSetMisfitHessianChangeStatus(), TaoADMMSetRegHessianChangeStatus(), TaoADMMGetSpectralPenalty(),
759:           TaoADMMGetMisfitSubsolver(), TaoADMMGetRegularizationSubsolver(), TaoADMMSetConstraintVectorRHS(),
760:           TaoADMMSetMinimumSpectralPenalty(), TaoADMMSetRegularizerCoefficient(),
761:           TaoADMMSetRegularizerConstraintJacobian(), TaoADMMSetMisfitConstraintJacobian(),
762:           TaoADMMSetMisfitObjectiveAndGradientRoutine(), TaoADMMSetMisfitHessianRoutine(),
763:           TaoADMMSetRegularizerObjectiveAndGradientRoutine(), TaoADMMSetRegularizerHessianRoutine(),
764:           TaoGetADMMParentTao(), TaoADMMGetDualVector(), TaoADMMSetRegularizerType(),
765:           TaoADMMGetRegularizerType(), TaoADMMSetUpdateType(), TaoADMMGetUpdateType()
766: M*/
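
A minimal user-level sketch, under stated assumptions, of driving TAOADMM with the routines defined in this file; UserMisfitObjGrad, the vector x, and the context argument are hypothetical placeholders, the snippet belongs inside an application routine, and error checking is omitted.

PetscErrorCode UserMisfitObjGrad(Tao,Vec,PetscReal*,Vec,void*); /* hypothetical user callback computing f(x) and its gradient */

Tao tao;
Vec x;   /* hypothetical user-created vector holding the initial guess */

TaoCreate(PETSC_COMM_WORLD,&tao);
TaoSetType(tao,TAOADMM);
TaoSetInitialVector(tao,x);
TaoADMMSetMisfitObjectiveAndGradientRoutine(tao,UserMisfitObjGrad,NULL /* or a user context */);
/* keep the default soft-threshold regularizer and set its weight lambda;
 * with no constraint Jacobians set, A = I and B = -I, so the constraint is x - z = 0 */
TaoADMMSetRegularizerCoefficient(tao,0.01);
TaoSetFromOptions(tao);
TaoSolve(tao);
TaoDestroy(&tao);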

768: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
769: {
770:   TAO_ADMM       *am;

774:   PetscNewLog(tao,&am);

776:   tao->ops->destroy        = TaoDestroy_ADMM;
777:   tao->ops->setup          = TaoSetUp_ADMM;
778:   tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
779:   tao->ops->view           = TaoView_ADMM;
780:   tao->ops->solve          = TaoSolve_ADMM;

782:   tao->data           = (void*)am;
783:   am->l1epsilon       = 1e-6;
784:   am->lambda          = 1e-4;
785:   am->mu              = 1.;
786:   am->muold           = 0.;
787:   am->mueps           = PETSC_MACHINE_EPSILON;
788:   am->mumin           = 0.;
789:   am->orthval         = 0.2;
790:   am->T               = 2;
791:   am->parent          = tao;
792:   am->update          = TAO_ADMM_UPDATE_BASIC;
793:   am->regswitch       = TAO_ADMM_REGULARIZER_SOFT_THRESH;
794:   am->tol             = PETSC_SMALL;
795:   am->const_norm      = 0;
796:   am->resnorm         = 0;
797:   am->dualres         = 0;
798:   am->ops->regobjgrad = NULL;
799:   am->ops->reghess    = NULL;
800:   am->gamma           = 1;
801:   am->regobjgradP     = NULL;
802:   am->reghessP        = NULL;
803:   am->gatol_admm      = 1e-8;
804:   am->catol_admm      = 0;
805:   am->Hxchange        = PETSC_TRUE;
806:   am->Hzchange        = PETSC_TRUE;
807:   am->Hzbool          = PETSC_TRUE;
808:   am->Hxbool          = PETSC_TRUE;

810:   TaoCreate(PetscObjectComm((PetscObject)tao),&am->subsolverX);
811:   TaoSetOptionsPrefix(am->subsolverX,"misfit_");
812:   PetscObjectIncrementTabLevel((PetscObject)am->subsolverX,(PetscObject)tao,1);
813:   TaoCreate(PetscObjectComm((PetscObject)tao),&am->subsolverZ);
814:   TaoSetOptionsPrefix(am->subsolverZ,"reg_");
815:   PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ,(PetscObject)tao,1);

817:   TaoSetType(am->subsolverX,TAONLS);
818:   TaoSetType(am->subsolverZ,TAONLS);
819:   PetscObjectCompose((PetscObject)am->subsolverX,"TaoGetADMMParentTao_ADMM", (PetscObject) tao);
820:   PetscObjectCompose((PetscObject)am->subsolverZ,"TaoGetADMMParentTao_ADMM", (PetscObject) tao);
821:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetRegularizerType_C",TaoADMMSetRegularizerType_ADMM);
822:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetRegularizerType_C",TaoADMMGetRegularizerType_ADMM);
823:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetUpdateType_C",TaoADMMSetUpdateType_ADMM);
824:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetUpdateType_C",TaoADMMGetUpdateType_ADMM);
825:   return(0);
826: }

828: /*@
 829:   TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether the Hessian matrix of the misfit subsolver changes with respect to the input vector.

831:   Collective on Tao

833:   Input Parameters:
834: +  tao - the Tao solver context.
 835: -  b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.

837:   Level: advanced

839: .seealso: TAOADMM

841: @*/
842: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
843: {
844:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

847:   am->Hxchange = b;
848:   return(0);
849: }

851: /*@
 852:   TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether the Hessian matrix of the regularization subsolver changes with respect to the input vector.

854:   Collective on Tao

856:   Input Parameters:
857: +  tao - the Tao solver context
 858: -  b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.

860:   Level: advanced

862: .seealso: TAOADMM

864: @*/
865: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
866: {
867:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

870:   am->Hzchange = b;
871:   return(0);
872: }

874: /*@
875:   TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value

877:   Collective on Tao

879:   Input Parameters:
880: +  tao - the Tao solver context
881: -  mu - spectral penalty

883:   Level: advanced

885: .seealso: TaoADMMSetMinimumSpectralPenalty(), TAOADMM
886: @*/
887: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
888: {
889:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

892:   am->mu = mu;
893:   return(0);
894: }

896: /*@
897:   TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value

899:   Collective on Tao

901:   Input Parameter:
902: .  tao - the Tao solver context

904:   Output Parameter:
905: .  mu - spectral penalty

907:   Level: advanced

909: .seealso: TaoADMMSetMinimumSpectralPenalty(), TaoADMMSetSpectralPenalty(), TAOADMM
910: @*/
911: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
912: {
913:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

918:   *mu = am->mu;
919:   return(0);
920: }

922: /*@
923:   TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside ADMM

925:   Collective on Tao

927:   Input Parameter:
928: .  tao - the Tao solver context

930:    Output Parameter:
931: .  misfit - the Tao subsolver context

933:   Level: advanced

935: .seealso: TAOADMM

937: @*/
938: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
939: {
940:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

943:   *misfit = am->subsolverX;
944:   return(0);
945: }

947: /*@
948:   TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside ADMM

950:   Collective on Tao

952:   Input Parameter:
953: .  tao - the Tao solver context

955:   Output Parameter:
956: .  reg - the Tao subsolver context

958:   Level: advanced

960: .seealso: TAOADMM

962: @*/
963: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
964: {
965:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

968:   *reg = am->subsolverZ;
969:   return(0);
970: }

972: /*@
973:   TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for ADMM

975:   Collective on Tao

977:   Input Parameters:
978: + tao - the Tao solver context
979: - c - RHS vector

981:   Level: advanced

983: .seealso: TAOADMM

985: @*/
986: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
987: {
988:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

991:   am->constraint = c;
992:   return(0);
993: }

995: /*@
996:   TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty

998:   Collective on Tao

1000:   Input Parameters:
1001: +  tao - the Tao solver context
1002: -  mu  - minimum spectral penalty value

1004:   Level: advanced

1006: .seealso: TaoADMMGetSpectralPenalty(), TAOADMM
1007: @*/
1008: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
1009: {
1010:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1013:   am->mumin= mu;
1014:   return(0);
1015: }

1017: /*@
1018:   TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case

1020:   Collective on Tao

1022:   Input Parameters:
1023: +  tao - the Tao solver context
1024: -  lambda - L1-norm regularizer coefficient

1026:   Level: advanced

1028: .seealso: TaoADMMSetMisfitConstraintJacobian(), TaoADMMSetRegularizerConstraintJacobian(), TAOADMM

1030: @*/
1031: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
1032: {
1033:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1036:   am->lambda = lambda;
1037:   return(0);
1038: }

1040: /*@C
 1041:   TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the ADMM algorithm. Matrix A constrains the x variable.

1043:   Collective on Tao

1045:   Input Parameters:
1046: + tao - the Tao solver context
 1047: . J - user-created misfit constraint Jacobian matrix
 1048: . Jpre - user-created misfit constraint Jacobian preconditioner matrix
 1049: . func - function pointer for the misfit constraint Jacobian update function
 1050: - ctx - user context for the misfit constraint Jacobian

1052:   Level: advanced

1054: .seealso: TaoADMMSetRegularizerCoefficient(), TaoADMMSetRegularizerConstraintJacobian(), TAOADMM

1056: @*/
1057: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1058: {
1059:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1064:   if (J) {
1067:   }
1068:   if (Jpre) {
1071:   }
1072:   if (ctx)  am->misfitjacobianP = ctx;
1073:   if (func) am->ops->misfitjac  = func;

1075:   if (J) {
1076:     PetscObjectReference((PetscObject)J);
1077:     MatDestroy(&am->JA);
1078:     am->JA = J;
1079:   }
1080:   if (Jpre) {
1081:     PetscObjectReference((PetscObject)Jpre);
1082:     MatDestroy(&am->JApre);
1083:     am->JApre = Jpre;
1084:   }
1085:   return(0);
1086: }
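
A hedged sketch of how this routine might be used for a constant (linear) misfit constraint; MyA is an assumed user-assembled matrix and the no-op callback below is hypothetical, needed only if a callback is desired even though the Jacobian does not depend on x.

/* Hypothetical: no-op update for a constraint Jacobian A that does not depend on x */
static PetscErrorCode ConstantMisfitJacobian(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx)
{
  return(0);
}

During problem setup this would be registered with a call such as TaoADMMSetMisfitConstraintJacobian(tao,MyA,MyA,ConstantMisfitJacobian,NULL); passing NULL for the update function is also acceptable for a constraint that never changes.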

1088: /*@C
 1089:   TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the ADMM algorithm. Matrix B constrains the z variable.

1091:   Collective on Tao

1093:   Input Parameters:
1094: + tao - the Tao solver context
1095: . J - user-created regularizer constraint Jacobian matrix
 1096: . Jpre - user-created regularizer constraint Jacobian preconditioner matrix
 1097: . func - function pointer for the regularizer constraint Jacobian update function
 1098: - ctx - user context for the regularizer constraint Jacobian

1100:   Level: advanced

1102: .seealso: TaoADMMSetRegularizerCoefficient(), TaoADMMSetMisfitConstraintJacobian(), TAOADMM

1104: @*/
1105: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1106: {
1107:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1112:   if (J) {
1115:   }
1116:   if (Jpre) {
1119:   }
1120:   if (ctx)  am->regjacobianP = ctx;
1121:   if (func) am->ops->regjac  = func;

1123:   if (J) {
1124:     PetscObjectReference((PetscObject)J);
1125:     MatDestroy(&am->JB);
1126:     am->JB = J;
1127:   }
1128:   if (Jpre) {
1129:     PetscObjectReference((PetscObject)Jpre);
1130:     MatDestroy(&am->JBpre);
1131:     am->JBpre = Jpre;
1132:   }
1133:   return(0);
1134: }

1136: /*@C
1137:    TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function

1139:    Collective on tao

1141:    Input Parameters:
1142: +    tao - the Tao context
1143: .    func - function pointer for the misfit value and gradient evaluation
1144: -    ctx - user context for the misfit

1146:    Level: advanced

1148: .seealso: TAOADMM

1150: @*/
1151: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
1152: {
1153:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1157:   am->misfitobjgradP     = ctx;
1158:   am->ops->misfitobjgrad = func;
1159:   return(0);
1160: }

1162: /*@C
1163:    TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1164:    function into the algorithm, to be used for subsolverX.

1166:    Collective on tao

1168:    Input Parameters:
1169:    + tao - the Tao context
1170:    . H - user-created matrix for the Hessian of the misfit term
1171:    . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1172:    . func - function pointer for the misfit Hessian evaluation
1173:    - ctx - user context for the misfit Hessian

1175:    Level: advanced

1177: .seealso: TAOADMM

1179: @*/
1180: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1181: {
1182:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1187:   if (H) {
1190:   }
1191:   if (Hpre) {
1194:   }
1195:   if (ctx) {
1196:     am->misfithessP = ctx;
1197:   }
1198:   if (func) {
1199:     am->ops->misfithess = func;
1200:   }
1201:   if (H) {
1202:     PetscObjectReference((PetscObject)H);
1203:     MatDestroy(&am->Hx);
1204:     am->Hx = H;
1205:   }
1206:   if (Hpre) {
1207:     PetscObjectReference((PetscObject)Hpre);
1208:     MatDestroy(&am->Hxpre);
1209:     am->Hxpre = Hpre;
1210:   }
1211:   return(0);
1212: }

1214: /*@C
1215:    TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function

1217:    Collective on tao

1219:    Input Parameters:
1220:    + tao - the Tao context
1221:    . func - function pointer for the regularizer value and gradient evaluation
1222:    - ctx - user context for the regularizer

1224:    Level: advanced

1226: .seealso: TAOADMM

1228: @*/
1229: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
1230: {
1231:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1235:   am->regobjgradP     = ctx;
1236:   am->ops->regobjgrad = func;
1237:   return(0);
1238: }
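
A minimal sketch of a user regularizer callback with this signature, implementing a simple Tikhonov term g(z) = 0.5*lambda*||z||^2; the callback name and the context layout are assumptions made for illustration, not part of admm.c.

static PetscErrorCode MyRegObjGrad(Tao tao, Vec z, PetscReal *f, Vec g, void *ctx)
{
  PetscReal lambda = *(PetscReal*)ctx; /* hypothetical: weight passed through the user context */
  PetscReal nrm;

  VecNorm(z,NORM_2,&nrm);
  *f = 0.5*lambda*nrm*nrm;   /* objective 0.5*lambda*||z||^2 */
  VecCopy(z,g);
  VecScale(g,lambda);        /* gradient lambda*z */
  return(0);
}

Such a routine would be registered with TaoADMMSetRegularizerObjectiveAndGradientRoutine(tao,MyRegObjGrad,&lambda) after selecting TAO_ADMM_REGULARIZER_USER via TaoADMMSetRegularizerType().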

1240: /*@C
1241:    TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1242:    function, to be used for subsolverZ.

1244:    Collective on tao

1246:    Input Parameters:
1247:    + tao - the Tao context
1248:    . H - user-created matrix for the Hessian of the regularization term
1249:    . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1250:    . func - function pointer for the regularizer Hessian evaluation
1251:    - ctx - user context for the regularizer Hessian

1253:    Level: advanced

1255: .seealso: TAOADMM

1257: @*/
1258: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1259: {
1260:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1265:   if (H) {
1268:   }
1269:   if (Hpre) {
1272:   }
1273:   if (ctx) {
1274:     am->reghessP = ctx;
1275:   }
1276:   if (func) {
1277:     am->ops->reghess = func;
1278:   }
1279:   if (H) {
1280:     PetscObjectReference((PetscObject)H);
1281:     MatDestroy(&am->Hz);
1282:     am->Hz = H;
1283:   }
1284:   if (Hpre) {
1285:     PetscObjectReference((PetscObject)Hpre);
1286:     MatDestroy(&am->Hzpre);
1287:     am->Hzpre = Hpre;
1288:   }
1289:   return(0);
1290: }

1292: /*@
1293:    TaoGetADMMParentTao - Gets pointer to parent ADMM tao, used by inner subsolver.

1295:    Collective on tao

1297:    Input Parameter:
1298:    . tao - the Tao context

1300:    Output Parameter:
1301:    . admm_tao - the parent Tao context

1303:    Level: advanced

1305: .seealso: TAOADMM

1307: @*/
1308: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1309: {

1314:   PetscObjectQuery((PetscObject)tao,"TaoGetADMMParentTao_ADMM", (PetscObject*) admm_tao);
1315:   return(0);
1316: }
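
Inside a callback attached to one of the subsolvers, the outer ADMM context can be recovered and queried; the sketch below is illustrative only and its function name is hypothetical.

static PetscErrorCode MyMisfitObjGrad(Tao subtao, Vec x, PetscReal *f, Vec g, void *ctx)
{
  Tao       parent;
  PetscReal mu;

  TaoGetADMMParentTao(subtao,&parent);    /* recover the outer TAOADMM solver */
  TaoADMMGetSpectralPenalty(parent,&mu);  /* e.g., read the current spectral penalty */
  /* ... evaluate f and g, possibly using mu ... */
  return(0);
}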

1318: /*@
1319:   TaoADMMGetDualVector - Returns the dual vector associated with the current TAOADMM state

1321:   Not Collective

1323:   Input Parameter:
1324:   . tao - the Tao context

1326:   Output Parameter:
 1327:   . Y - the current dual variable vector

1329:   Level: intermediate

1331: .seealso: TAOADMM

1333: @*/
1334: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1335: {
1336:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1340:   *Y = am->y;
1341:   return(0);
1342: }

1344: /*@
1345:   TaoADMMSetRegularizerType - Set regularizer type for ADMM routine

1347:   Not Collective

1349:   Input Parameters:
1350: + tao  - the Tao context
1351: - type - regularizer type

1353:   Options Database:
1354: .  -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh>

1356:   Level: intermediate

1358: .seealso: TaoADMMGetRegularizerType(), TaoADMMRegularizerType, TAOADMM
1359: @*/
1360: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1361: {

1367:   PetscTryMethod(tao,"TaoADMMSetRegularizerType_C",(Tao,TaoADMMRegularizerType),(tao,type));
1368:   return(0);
1369: }

1371: /*@
1372:    TaoADMMGetRegularizerType - Gets the type of regularizer routine for ADMM

1374:    Not Collective

1376:    Input Parameter:
1377: .  tao - the Tao context

1379:    Output Parameter:
1380: .  type - the type of regularizer

1382:   Options Database:
1383: .  -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh>

1385:    Level: intermediate

1387: .seealso: TaoADMMSetRegularizerType(), TaoADMMRegularizerType, TAOADMM
1388: @*/
1389: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1390: {

1395:   PetscUseMethod(tao,"TaoADMMGetRegularizerType_C",(Tao,TaoADMMRegularizerType*),(tao,type));
1396:   return(0);
1397: }

1399: /*@
1400:   TaoADMMSetUpdateType - Set update routine for ADMM routine

1402:   Not Collective

1404:   Input Parameters:
1405: + tao  - the Tao context
1406: - type - spectral parameter update type

1408:   Level: intermediate

1410: .seealso: TaoADMMGetUpdateType(), TaoADMMUpdateType, TAOADMM
1411: @*/
1412: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1413: {

1419:   PetscTryMethod(tao,"TaoADMMSetUpdateType_C",(Tao,TaoADMMUpdateType),(tao,type));
1420:   return(0);
1421: }

1423: /*@
1424:    TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for ADMM

1426:    Not Collective

1428:    Input Parameter:
1429: .  tao - the Tao context

1431:    Output Parameter:
1432: .  type - the type of spectral penalty update routine

1434:    Level: intermediate

1436: .seealso: TaoADMMSetUpdateType(), TaoADMMUpdateType, TAOADMM
1437: @*/
1438: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1439: {

1444:   PetscUseMethod(tao,"TaoADMMGetUpdateType_C",(Tao,TaoADMMUpdateType*),(tao,type));
1445:   return(0);
1446: }