Actual source code: admm.c

  1: #include <../src/tao/constrained/impls/admm/admm.h>
  2: #include <petsctao.h>
  3: #include <petsc/private/petscimpl.h>

  5: /* Updates terminating criteria
  6:  *
  7:  * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
  8:  *
  9:  * 2. Updates dual residual, d_k
 10:  *
 11:  * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty||   */

 13: static PetscBool cited = PETSC_FALSE;
 14: static const char citation[] =
 15:   "@misc{xu2017adaptive,\n"
 16:   "   title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
 17:   "   author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
 18:   "   year={2017},\n"
 19:   "   eprint={1704.02712},\n"
 20:   "   archivePrefix={arXiv},\n"
 21:   "   primaryClass={cs.CV}\n"
 22:   "}  \n";

 24: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
 25: {
 26:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
 28:   PetscReal      Axnorm,Bznorm,ATynorm,temp;
 29:   Vec            tempJR,tempL;
 30:   Tao            mis;

 33:   mis    = am->subsolverX;
 34:   tempJR = am->workJacobianRight;
 35:   tempL  = am->workLeft;
 36:   /* ATy */
 37:   TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre);
 38:   MatMultTranspose(mis->jacobian_equality,am->y,tempJR);
 39:   VecNorm(tempJR,NORM_2,&ATynorm);
 40:   /* dualres = mu * ||AT(Bz-Bzold)||_2 */
 41:   VecWAXPY(tempJR,-1.,am->Bzold,am->Bz);
 42:   MatMultTranspose(mis->jacobian_equality,tempJR,tempL);
 43:   VecNorm(tempL,NORM_2,&am->dualres);
 44:   am->dualres *= am->mu;

 46:   /* ||Ax||_2, ||Bz||_2 */
 47:   VecNorm(am->Ax,NORM_2,&Axnorm);
 48:   VecNorm(am->Bz,NORM_2,&Bznorm);

 50:   /* Set catol to be catol_admm *  max{||Ax||,||Bz||,||c||} *
 51:    * Set gatol to be gatol_admm *  ||A^Ty|| *
 52:    * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
 53:   temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm,am->const_norm));
 54:   TaoSetConstraintTolerances(tao,temp,PETSC_DEFAULT);
 55:   TaoSetTolerances(tao, am->gatol_admm*ATynorm, PETSC_DEFAULT,PETSC_DEFAULT);
 56:   return(0);
 57: }

 59: /* Penalty update for adaptive ADMM. */
 60: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
 61: {
 62:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
 64:   PetscReal      ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
 65:   PetscBool      hflag, gflag;
 66:   Vec            tempJR,tempJR2;

 69:   tempJR  = am->workJacobianRight;
 70:   tempJR2 = am->workJacobianRight2;
 71:   hflag   = PETSC_FALSE;
 72:   gflag   = PETSC_FALSE;
 73:   a_k     = -1;
 74:   b_k     = -1;

 76:   VecWAXPY(tempJR,-1.,am->Axold,am->Ax);
 77:   VecWAXPY(tempJR2,-1.,am->yhatold,am->yhat);
 78:   VecNorm(tempJR,NORM_2,&Axdiff_norm);
 79:   VecNorm(tempJR2,NORM_2,&yhatdiff_norm);
 80:   VecDot(tempJR,tempJR2,&Axyhat);

 82:   VecWAXPY(tempJR,-1.,am->Bz0,am->Bz);
 83:   VecWAXPY(tempJR2,-1.,am->y,am->y0);
 84:   VecNorm(tempJR,NORM_2,&Bzdiff_norm);
 85:   VecNorm(tempJR2,NORM_2,&ydiff_norm);
 86:   VecDot(tempJR,tempJR2,&Bzy);

 88:   if (Axyhat > am->orthval*Axdiff_norm*yhatdiff_norm + am->mueps) {
 89:     hflag = PETSC_TRUE;
 90:     a_sd  = PetscSqr(yhatdiff_norm)/Axyhat; /* alphaSD */
 91:     a_mg  = Axyhat/PetscSqr(Axdiff_norm);   /* alphaMG */
 92:     a_k   = (a_mg/a_sd) > 0.5 ? a_mg : a_sd - 0.5*a_mg;
 93:   }
 94:   if (Bzy > am->orthval*Bzdiff_norm*ydiff_norm + am->mueps) {
 95:     gflag = PETSC_TRUE;
 96:     b_sd  = PetscSqr(ydiff_norm)/Bzy;  /* betaSD */
 97:     b_mg  = Bzy/PetscSqr(Bzdiff_norm); /* betaMG */
 98:     b_k   = (b_mg/b_sd) > 0.5 ? b_mg : b_sd - 0.5*b_mg;
 99:   }
100:   am->muold = am->mu;
101:   if (gflag && hflag) {
102:     am->mu = PetscSqrtReal(a_k*b_k);
103:   } else if (hflag) {
104:     am->mu = a_k;
105:   } else if (gflag) {
106:     am->mu = b_k;
107:   }
108:   if (am->mu > am->muold) {
109:     am->mu = am->muold;
110:   }
111:   if (am->mu < am->mumin) {
112:     am->mu = am->mumin;
113:   }
114:   return(0);
115: }
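/* For reference, the safeguarded penalty estimate above follows the spectral
 * stepsize rules of the adaptive ADMM reference cited at the top of this file:
 *   alphaSD = ||yhat - yhatold||^2 / <Ax - Axold, yhat - yhatold>
 *   alphaMG = <Ax - Axold, yhat - yhatold> / ||Ax - Axold||^2
 *   a_k     = alphaMG              if alphaMG > alphaSD/2
 *           = alphaSD - alphaMG/2  otherwise
 * (b_k analogously from the Bz and y differences). The new penalty is
 * sqrt(a_k*b_k) when both curvature tests pass, a_k or b_k when only one does,
 * and it is then clipped above by the previous penalty and below by mumin. */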

117: static PetscErrorCode  TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
118: {
119:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

122:   am->regswitch = type;
123:   return(0);
124: }

126: static PetscErrorCode  TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
127: {
128:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

131:   *type = am->regswitch;
132:   return(0);
133: }

135: static PetscErrorCode  TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
136: {
137:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

140:   am->update = type;
141:   return(0);
142: }

144: static PetscErrorCode  TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
145: {
146:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

149:   *type = am->update;
150:   return(0);
151: }

153: /* This routine updates the Jacobians with the new x,z vectors,
154:  * updates the Ax and Bz vectors, and then computes the updated residual vector */
155: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
156: {
157:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
158:   Tao            mis,reg;

162:   mis  = am->subsolverX;
163:   reg  = am->subsolverZ;
164:   TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre);
165:   MatMult(mis->jacobian_equality,x,Ax);
166:   TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre);
167:   MatMult(reg->jacobian_equality,z,Bz);

169:   VecWAXPY(residual,1.,Bz,Ax);
170:   if (am->constraint != NULL) {
171:     VecAXPY(residual,-1.,am->constraint);
172:   }
173:   return(0);
174: }

176: /* Adds the augmented Lagrangian terms to the given routine.
177:  * For subsolverX, the routine needs to be ObjectiveAndGradient;
178:  * separate Objective and Gradient routines are not supported.  */
179: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
180: {
181:   Tao            parent = (Tao)ptr;
182:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;
184:   PetscReal      temp,temp2;
185:   Vec            tempJR;

188:   tempJR = am->workJacobianRight;
189:   ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
190:   (*am->ops->misfitobjgrad)(am->subsolverX,x,f,g,am->misfitobjgradP);

192:   am->last_misfit_val = *f;
193:   /* Objective: add y^T(Ax+Bz-c) + (mu/2)*||Ax+Bz-c||_2^2 */
194:   VecTDot(am->residual,am->y,&temp);
195:   VecTDot(am->residual,am->residual,&temp2);
196:   *f   += temp + (am->mu/2)*temp2;

198:   /* Gradient: add mu*A^T(Ax+Bz-c) + A^T y */
199:   MatMultTranspose(tao->jacobian_equality,am->residual,tempJR);
200:   VecAXPY(g,am->mu,tempJR);
201:   MatMultTranspose(tao->jacobian_equality,am->y,tempJR);
202:   VecAXPY(g,1.,tempJR);
203:   return(0);
204: }
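/* A minimal sketch of the kind of user misfit routine that SubObjGradUpdate()
 * wraps, registered via TaoADMMSetMisfitObjectiveAndGradientRoutine(). The names
 * AppCtx, D, b and work are hypothetical; it evaluates f(x) = 0.5*||Dx-b||^2 with
 * gradient D^T(Dx-b). The augmented Lagrangian terms are added by the wrapper
 * above, not by the user routine.
 *
 *   typedef struct {Mat D; Vec b, work;} AppCtx;
 *
 *   PetscErrorCode UserMisfitObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
 *   {
 *     AppCtx         *user = (AppCtx*)ptr;
 *     PetscReal       nrm;
 *     PetscErrorCode  ierr;
 *
 *     ierr = MatMult(user->D,x,user->work);CHKERRQ(ierr);
 *     ierr = VecAXPY(user->work,-1.0,user->b);CHKERRQ(ierr);
 *     ierr = VecNorm(user->work,NORM_2,&nrm);CHKERRQ(ierr);
 *     *f   = 0.5*nrm*nrm;
 *     ierr = MatMultTranspose(user->D,user->work,g);CHKERRQ(ierr);
 *     return 0;
 *   }
 */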

206: /* Adds the augmented Lagrangian terms to the given routine.
207:  * For subsolverZ, the routine needs to be ObjectiveAndGradient;
208:  * separate Objective and Gradient routines are not supported.  */
209: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
210: {
211:   Tao            parent = (Tao)ptr;
212:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;
214:   PetscReal      temp,temp2;
215:   Vec            tempJR;

218:   tempJR = am->workJacobianRight;
219:   ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual);
220:   (*am->ops->regobjgrad)(am->subsolverZ,z,f,g,am->regobjgradP);
221:   am->last_reg_val= *f;
222:   /* Objective: add y^T(Ax+Bz-c) + (mu/2)*||Ax+Bz-c||_2^2 */
223:   VecTDot(am->residual,am->y,&temp);
224:   VecTDot(am->residual,am->residual,&temp2);
225:   *f   += temp + (am->mu/2)*temp2;

227:   /* Gradient: add mu*B^T(Ax+Bz-c) + B^T y */
228:   MatMultTranspose(am->subsolverZ->jacobian_equality,am->residual,tempJR);
229:   VecAXPY(g,am->mu,tempJR);
230:   MatMultTranspose(am->subsolverZ->jacobian_equality,am->y,tempJR);
231:   VecAXPY(g,1.,tempJR);
232:   return(0);
233: }

235: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
236: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
237: {
238:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
239:   PetscInt       N;

243:   VecGetSize(am->workLeft,&N);
244:   VecPointwiseMult(am->workLeft,x,x);
245:   VecShift(am->workLeft,am->l1epsilon*am->l1epsilon);
246:   VecSqrtAbs(am->workLeft);
247:   VecSum(am->workLeft,norm);
248:   *norm += N*am->l1epsilon;
249:   *norm *= am->lambda;
250:   return(0);
251: }

253: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
254: {
255:   TAO_ADMM       *am = (TAO_ADMM*)ptr;

259:   switch (am->update) {
260:   case (TAO_ADMM_UPDATE_BASIC):
261:     break;
262:   case (TAO_ADMM_UPDATE_ADAPTIVE):
263:   case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
264:     if (H && (am->muold != am->mu)) {
265:       if (!Identity) {
266:         MatAXPY(H,am->mu-am->muold,Constraint,DIFFERENT_NONZERO_PATTERN);
267:       } else {
268:         MatShift(H,am->mu-am->muold);
269:       }
270:     }
271:     break;
272:   }
273:   return(0);
274: }

276: /* Updates the Hessian - adds the second derivative of the augmented Lagrangian
277:  * H \gets H + \rho*ATA
278:  * Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus this is a no-op.
279:  * For ADAPTIVE and ADAPTIVE_RELAXED,
280:  * H \gets H + (\rho-\rhoold)*ATA
281:  * Here, we assume that A is a linear constraint, i.e., it does not change.
282:  * Thus, for both ADAPTIVE and RELAXED, the ATA matrix is pre-set (except for A=I (NULL case)); see TaoSetUp_ADMM */
283: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
284: {
285:   Tao            parent = (Tao)ptr;
286:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;

290:   if (am->Hxchange) {
291:     /* Case where Hessian gets updated with respect to x vector input. */
292:     (*am->ops->misfithess)(am->subsolverX,x,H,Hpre,am->misfithessP);
293:     ADMMInternalHessianUpdate(am->subsolverX->hessian,am->ATA,am->xJI,am);
294:   } else if (am->Hxbool) {
295:     /* Hessian does not depend on the input vector: H(x) = const */
296:     /* Update the augmented Lagrangian term only once per TaoSolve call */
297:     ADMMInternalHessianUpdate(am->subsolverX->hessian,am->ATA,am->xJI,am);
298:     am->Hxbool = PETSC_FALSE;
299:   }
300:   return(0);
301: }

303: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
304: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
305: {
306:   Tao            parent = (Tao)ptr;
307:   TAO_ADMM       *am    = (TAO_ADMM*)parent->data;


312:   if (am->Hzchange) {
313:     /* Case where the Hessian gets updated with respect to the z vector input. */
314:     (*am->ops->reghess)(am->subsolverZ,z,H,Hpre,am->reghessP);
315:     ADMMInternalHessianUpdate(am->subsolverZ->hessian,am->BTB,am->zJI,am);
316:   } else if (am->Hzbool) {
317:     /* Hessian does not depend on the input vector: H(z) = const */
318:     /* Update the augmented Lagrangian term only once per TaoSolve call */
319:     ADMMInternalHessianUpdate(am->subsolverZ->hessian,am->BTB,am->zJI,am);
320:     am->Hzbool = PETSC_FALSE;
321:   }
322:   return(0);
323: }
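/* A sketch of a user Hessian routine of the kind these wrappers call, registered via
 * TaoADMMSetMisfitHessianRoutine() (the regularizer variant is analogous). Names are
 * hypothetical. For the quadratic misfit f(x) = 0.5*||Dx-b||^2 the Hessian D^T D is
 * constant, so once H has been assembled the callback has nothing to recompute, and
 * TaoADMMSetMisfitHessianChangeStatus(tao,PETSC_FALSE) lets ADMM refresh only the
 * penalty term of the augmented Lagrangian.
 *
 *   PetscErrorCode UserMisfitHessian(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
 *   {
 *     return 0;
 *   }
 */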

325: /* Shell matrix routine for the A matrix.
326:  * This gets used when the user passes NULL for
327:  * TaoSetJacobianEqualityRoutine(tao, NULL,NULL, ...)
328:  * Essentially sets A=I */
329: static PetscErrorCode JacobianIdentity(Mat mat,Vec in,Vec out)
330: {

334:   VecCopy(in,out);
335:   return(0);
336: }

338: /* Shell matrix routine for the B matrix.
339:  * This gets used when the user passes NULL for
340:  * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
341:  * Sets B=-I */
342: static PetscErrorCode JacobianIdentityB(Mat mat,Vec in,Vec out)
343: {

347:   VecCopy(in,out);
348:   VecScale(out,-1.);
349:   return(0);
350: }

352: /* Solve min f(x) + g(z) s.t. Ax + Bz = c */
353: static PetscErrorCode TaoSolve_ADMM(Tao tao)
354: {
355:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
357:   PetscInt       N;
358:   PetscReal      reg_func;
359:   PetscBool      is_reg_shell;
360:   Vec            tempL;

363:   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
364:     if (!am->subsolverX->ops->computejacobianequality) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoADMMSetMisfitConstraintJacobian() first");
365:     if (!am->subsolverZ->ops->computejacobianequality) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoADMMSetRegularizerConstraintJacobian() first");
366:     if (am->constraint != NULL) {
367:       VecNorm(am->constraint,NORM_2,&am->const_norm);
368:     }
369:   }
370:   tempL = am->workLeft;
371:   VecGetSize(tempL,&N);

373:   if (am->Hx && am->ops->misfithess) {
374:     TaoSetHessianRoutine(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao);
375:   }

377:   if (!am->zJI) {
378:     /* Currently, B is assumed to be linear, i.e., it does not get updated */
379:     MatTransposeMatMult(am->JB,am->JB,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&(am->BTB));
380:   }
381:   if (!am->xJI) {
382:     /* Currently, A is assumed to be linear, i.e., it does not get updated */
383:     MatTransposeMatMult(am->subsolverX->jacobian_equality,am->subsolverX->jacobian_equality,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&(am->ATA));
384:   }

386:   is_reg_shell = PETSC_FALSE;

388:   PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell);

390:   if (!is_reg_shell) {
391:     switch (am->regswitch) {
392:     case (TAO_ADMM_REGULARIZER_USER):
393:       if (!am->ops->regobjgrad) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Must call TaoADMMSetRegularizerObjectiveAndGradientRoutine() first if one wishes to use TAO_ADMM_REGULARIZER_USER with non-TAOSHELL type");
394:       break;
395:     case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
396:       /* Soft Threshold. */
397:       break;
398:     }
399:     if (am->ops->regobjgrad) {
400:       TaoSetObjectiveAndGradientRoutine(am->subsolverZ, RegObjGradUpdate, tao);
401:     }
402:     if (am->Hz && am->ops->reghess) {
403:       TaoSetHessianRoutine(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao);
404:     }
405:   }

407:   switch (am->update) {
408:   case TAO_ADMM_UPDATE_BASIC:
409:     if (am->subsolverX->hessian) {
410:       /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty during the iteration.
411:        * Here, add mu*ATA to the Hessian (or shift by mu when A is the identity, i.e., am->xJI) */
412:       if (!am->xJI) {
413:         MatAXPY(am->subsolverX->hessian,am->mu,am->ATA,DIFFERENT_NONZERO_PATTERN);
414:       } else {
415:         MatShift(am->subsolverX->hessian,am->mu);
416:       }
417:     }
418:     if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
419:       if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
420:         MatAXPY(am->subsolverZ->hessian,am->mu,am->BTB,DIFFERENT_NONZERO_PATTERN);
421:       } else {
422:         MatShift(am->subsolverZ->hessian,am->mu);
423:       }
424:     }
425:     break;
426:   case TAO_ADMM_UPDATE_ADAPTIVE:
427:   case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
428:     break;
429:   }

431:   PetscCitationsRegister(citation,&cited);
432:   tao->reason = TAO_CONTINUE_ITERATING;

434:   while (tao->reason == TAO_CONTINUE_ITERATING) {
435:     if (tao->ops->update) {
436:       (*tao->ops->update)(tao, tao->niter, tao->user_update);
437:     }
438:     VecCopy(am->Bz, am->Bzold);

440:     /* x update */
441:     TaoSolve(am->subsolverX);
442:     TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre);
443:     MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution,am->Ax);

445:     am->Hxbool = PETSC_TRUE;

447:     /* z update */
448:     switch (am->regswitch) {
449:     case TAO_ADMM_REGULARIZER_USER:
450:       TaoSolve(am->subsolverZ);
451:       break;
452:     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
453:       /* L1 assumes the A,B Jacobians are n-by-n identity matrices */
454:       VecWAXPY(am->workJacobianRight,1/am->mu,am->y,am->Ax);
455:       TaoSoftThreshold(am->workJacobianRight,-am->lambda/am->mu,am->lambda/am->mu,am->subsolverZ->solution);
456:       break;
457:     }
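    /* In the soft-threshold case above, the z update is the closed-form proximal step
     * z = S_{lambda/mu}(Ax + y/mu), where S_t(v)_i = sign(v_i)*max(|v_i| - t, 0);
     * TaoSoftThreshold() applies this with the bounds -lambda/mu and lambda/mu. */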
458:     am->Hzbool = PETSC_TRUE;
459:     /* Returns Ax + Bz - c with updated Ax,Bz vectors */
460:     ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
461:     /* Dual variable update, y = yold + mu*(Ax+Bz-c) */
462:     VecWAXPY(am->y, am->mu, am->residual, am->yold);

464:     /* stopping tolerance update */
465:     TaoADMMToleranceUpdate(tao);

467:     /* Updating Spectral Penalty */
468:     switch (am->update) {
469:     case TAO_ADMM_UPDATE_BASIC:
470:       am->muold = am->mu;
471:       break;
472:     case TAO_ADMM_UPDATE_ADAPTIVE:
473:     case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
474:       if (tao->niter == 0) {
475:         VecCopy(am->y, am->y0);
476:         VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
477:         if (am->constraint) {
478:           VecAXPY(am->residual, -1., am->constraint);
479:         }
480:         VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold);
481:         VecCopy(am->Ax, am->Axold);
482:         VecCopy(am->Bz, am->Bz0);
483:         am->muold = am->mu;
484:       } else if (tao->niter % am->T == 1) {
485:         /* we computed Bzold in a previous iteration, and we computed Ax above */
486:         VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
487:         if (am->constraint) {
488:           VecAXPY(am->residual, -1., am->constraint);
489:         }
490:         VecWAXPY(am->yhat, -am->mu, am->residual, am->yold);
491:         AdaptiveADMMPenaltyUpdate(tao);
492:         VecCopy(am->Ax, am->Axold);
493:         VecCopy(am->Bz, am->Bz0);
494:         VecCopy(am->yhat, am->yhatold);
495:         VecCopy(am->y, am->y0);
496:       } else {
497:         am->muold = am->mu;
498:       }
499:       break;
500:     default:
501:       break;
502:     }
503:     tao->niter++;

505:     /* Calculate original function values. The misfit part was saved in SubObjGradUpdate() during the x update */
506:     switch (am->regswitch) {
507:     case TAO_ADMM_REGULARIZER_USER:
508:       if (is_reg_shell) {
509:         ADMML1EpsilonNorm(tao,am->subsolverZ->solution,am->l1epsilon,&reg_func);
510:       } else {
511:         (*am->ops->regobjgrad)(am->subsolverZ,am->subsolverX->solution,&reg_func,tempL,am->regobjgradP);
512:       }
513:       break;
514:     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
515:       ADMML1EpsilonNorm(tao,am->subsolverZ->solution,am->l1epsilon,&reg_func);
516:       break;
517:     }
518:     VecCopy(am->y,am->yold);
519:     ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
520:     VecNorm(am->residual,NORM_2,&am->resnorm);
521:     TaoLogConvergenceHistory(tao,am->last_misfit_val + reg_func,am->dualres,am->resnorm,tao->ksp_its);

523:     TaoMonitor(tao,tao->niter,am->last_misfit_val + reg_func,am->dualres,am->resnorm,1.0);
524:     (*tao->ops->convergencetest)(tao,tao->cnvP);
525:   }
526:   /* Update vectors */
527:   VecCopy(am->subsolverX->solution,tao->solution);
528:   VecCopy(am->subsolverX->gradient,tao->gradient);
529:   PetscObjectCompose((PetscObject)am->subsolverX,"TaoGetADMMParentTao_ADMM", NULL);
530:   PetscObjectCompose((PetscObject)am->subsolverZ,"TaoGetADMMParentTao_ADMM", NULL);
531:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetRegularizerType_C",NULL);
532:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetRegularizerType_C",NULL);
533:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetUpdateType_C",NULL);
534:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetUpdateType_C",NULL);
535:   return(0);
536: }

538: static PetscErrorCode TaoSetFromOptions_ADMM(PetscOptionItems *PetscOptionsObject,Tao tao)
539: {
540:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

544:   PetscOptionsHead(PetscOptionsObject,"ADMM problem that solves f(x) in the form f(x) + g(z) subject to x - z = 0. Norms 1 and 2 are supported. Different subsolver routines can be selected. ");
545:   PetscOptionsReal("-tao_admm_regularizer_coefficient","regularizer constant","",am->lambda,&am->lambda,NULL);
546:   PetscOptionsReal("-tao_admm_spectral_penalty","Constant for Augmented Lagrangian term.","",am->mu,&am->mu,NULL);
547:   PetscOptionsReal("-tao_admm_relaxation_parameter","x relaxation parameter for Z update.","",am->gamma,&am->gamma,NULL);
548:   PetscOptionsReal("-tao_admm_tolerance_update_factor","ADMM dynamic tolerance update factor.","",am->tol,&am->tol,NULL);
549:   PetscOptionsReal("-tao_admm_spectral_penalty_update_factor","ADMM spectral penalty update curvature safeguard value.","",am->orthval,&am->orthval,NULL);
550:   PetscOptionsReal("-tao_admm_minimum_spectral_penalty","Set ADMM minimum spectral penalty.","",am->mumin,&am->mumin,NULL);
551:   PetscOptionsEnum("-tao_admm_dual_update","Lagrangian dual update policy","TaoADMMUpdateType",
552:                           TaoADMMUpdateTypes,(PetscEnum)am->update,(PetscEnum*)&am->update,NULL);
553:   PetscOptionsEnum("-tao_admm_regularizer_type","ADMM regularizer update rule","TaoADMMRegularizerType",
554:                           TaoADMMRegularizerTypes,(PetscEnum)am->regswitch,(PetscEnum*)&am->regswitch,NULL);
555:   PetscOptionsTail();
556:   TaoSetFromOptions(am->subsolverX);
557:   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
558:     TaoSetFromOptions(am->subsolverZ);
559:   }
560:   return(0);
561: }

563: static PetscErrorCode TaoView_ADMM(Tao tao,PetscViewer viewer)
564: {
565:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

569:   PetscViewerASCIIPushTab(viewer);
570:   TaoView(am->subsolverX,viewer);
571:   TaoView(am->subsolverZ,viewer);
572:   PetscViewerASCIIPopTab(viewer);
573:   return(0);
574: }

576: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
577: {
578:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;
580:   PetscInt       n,N,M;

583:   VecGetLocalSize(tao->solution,&n);
584:   VecGetSize(tao->solution,&N);
585:   /* If a Jacobian is given as NULL, it is taken to be the identity matrix, sized like the solution vector */
586:   if (!am->JB) {
587:     am->zJI   = PETSC_TRUE;
588:     MatCreateShell(PetscObjectComm((PetscObject)tao),n,n,PETSC_DETERMINE,PETSC_DETERMINE,NULL,&am->JB);
589:     MatShellSetOperation(am->JB,MATOP_MULT,(void (*)(void))JacobianIdentityB);
590:     MatShellSetOperation(am->JB,MATOP_MULT_TRANSPOSE,(void (*)(void))JacobianIdentityB);
591:     MatShellSetOperation(am->JB,MATOP_TRANSPOSE_MAT_MULT,(void (*)(void))JacobianIdentityB);
592:     am->JBpre = am->JB;
593:   }
594:   if (!am->JA) {
595:     am->xJI   = PETSC_TRUE;
596:     MatCreateShell(PetscObjectComm((PetscObject)tao),n,n,PETSC_DETERMINE,PETSC_DETERMINE,NULL,&am->JA);
597:     MatShellSetOperation(am->JA,MATOP_MULT,(void (*)(void))JacobianIdentity);
598:     MatShellSetOperation(am->JA,MATOP_MULT_TRANSPOSE,(void (*)(void))JacobianIdentity);
599:     MatShellSetOperation(am->JA,MATOP_TRANSPOSE_MAT_MULT,(void (*)(void))JacobianIdentity);
600:     am->JApre = am->JA;
601:   }
602:   MatCreateVecs(am->JA,NULL,&am->Ax);
603:   if (!tao->gradient) {
604:     VecDuplicate(tao->solution,&tao->gradient);
605:   }
606:   TaoSetInitialVector(am->subsolverX, tao->solution);
607:   if (!am->z) {
608:     VecDuplicate(tao->solution,&am->z);
609:     VecSet(am->z,0.0);
610:   }
611:   TaoSetInitialVector(am->subsolverZ, am->z);
612:   if (!am->workLeft) {
613:     VecDuplicate(tao->solution,&am->workLeft);
614:   }
615:   if (!am->Axold) {
616:     VecDuplicate(am->Ax,&am->Axold);
617:   }
618:   if (!am->workJacobianRight) {
619:     VecDuplicate(am->Ax,&am->workJacobianRight);
620:   }
621:   if (!am->workJacobianRight2) {
622:     VecDuplicate(am->Ax,&am->workJacobianRight2);
623:   }
624:   if (!am->Bz) {
625:     VecDuplicate(am->Ax,&am->Bz);
626:   }
627:   if (!am->Bzold) {
628:     VecDuplicate(am->Ax,&am->Bzold);
629:   }
630:   if (!am->Bz0) {
631:     VecDuplicate(am->Ax,&am->Bz0);
632:   }
633:   if (!am->y) {
634:     VecDuplicate(am->Ax,&am->y);
635:     VecSet(am->y,0.0);
636:   }
637:   if (!am->yold) {
638:     VecDuplicate(am->Ax,&am->yold);
639:     VecSet(am->yold,0.0);
640:   }
641:   if (!am->y0) {
642:     VecDuplicate(am->Ax,&am->y0);
643:     VecSet(am->y0,0.0);
644:   }
645:   if (!am->yhat) {
646:     VecDuplicate(am->Ax,&am->yhat);
647:     VecSet(am->yhat,0.0);
648:   }
649:   if (!am->yhatold) {
650:     VecDuplicate(am->Ax,&am->yhatold);
651:     VecSet(am->yhatold,0.0);
652:   }
653:   if (!am->residual) {
654:     VecDuplicate(am->Ax,&am->residual);
655:     VecSet(am->residual,0.0);
656:   }
657:   if (!am->constraint) {
658:     am->constraint = NULL;
659:   } else {
660:     VecGetSize(am->constraint,&M);
661:     if (M != N) SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"Solution vector and constraint vector must be of same size!");
662:   }

664:   /* Save changed tao tolerance for adaptive tolerance */
665:   if (tao->gatol_changed) {
666:     am->gatol_admm = tao->gatol;
667:   }
668:   if (tao->catol_changed) {
669:     am->catol_admm = tao->catol;
670:   }

672:   /* Set the augmented objective/gradient and the constraint Jacobian routines on the subsolvers */
673:   TaoSetObjectiveAndGradientRoutine(am->subsolverX, SubObjGradUpdate, tao);
674:   TaoSetJacobianEqualityRoutine(am->subsolverX,am->JA,am->JApre, am->ops->misfitjac, am->misfitjacobianP);
675:   TaoSetJacobianEqualityRoutine(am->subsolverZ,am->JB,am->JBpre, am->ops->regjac, am->regjacobianP);
676:   return(0);
677: }

679: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
680: {
681:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

685:   VecDestroy(&am->z);
686:   VecDestroy(&am->Ax);
687:   VecDestroy(&am->Axold);
688:   VecDestroy(&am->Bz);
689:   VecDestroy(&am->Bzold);
690:   VecDestroy(&am->Bz0);
691:   VecDestroy(&am->residual);
692:   VecDestroy(&am->y);
693:   VecDestroy(&am->yold);
694:   VecDestroy(&am->y0);
695:   VecDestroy(&am->yhat);
696:   VecDestroy(&am->yhatold);
697:   VecDestroy(&am->workLeft);
698:   VecDestroy(&am->workJacobianRight);
699:   VecDestroy(&am->workJacobianRight2);

701:   MatDestroy(&am->JA);
702:   MatDestroy(&am->JB);
703:   if (!am->xJI) {
704:     MatDestroy(&am->JApre);
705:   }
706:   if (!am->zJI) {
707:     MatDestroy(&am->JBpre);
708:   }
709:   if (am->Hx) {
710:     MatDestroy(&am->Hx);
711:     MatDestroy(&am->Hxpre);
712:   }
713:   if (am->Hz) {
714:     MatDestroy(&am->Hz);
715:     MatDestroy(&am->Hzpre);
716:   }
717:   MatDestroy(&am->ATA);
718:   MatDestroy(&am->BTB);
719:   TaoDestroy(&am->subsolverX);
720:   TaoDestroy(&am->subsolverZ);
721:   am->parent = NULL;
722:   PetscFree(tao->data);
723:   return(0);
724: }

726: /*MC

728:   TAOADMM - Alternating direction method of multipliers for solving problems with linear
729:             equality constraints, in the form min_{x,z} f(x) + g(z)  s.t. Ax+Bz=c.
730:             This algorithm employs two sub-Tao solvers, whose types can be specified
731:             by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a Hessian routine, for both subsolvers.
732:             Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
733:             TaoADMMSet{Misfit,Regularizer}HessianChangeStatus.
734:             The second subsolver supports TAOSHELL. It should be noted that the L1 norm is used for the objective value for the TAOSHELL type.
735:             The regularizer type can be selected; currently soft thresholding is implemented. For the spectral penalty update,
736:             basic and adaptive options are currently available.
737:             The constraint is Ax+Bz=c, and A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian.
738:             c can be set with TaoADMMSetConstraintVectorRHS.
739:             The user can also provide a regularizer weight for the second subsolver.

741:   References:
742: .    1. - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom
743:           "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation"
744:           The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017.

746:   Options Database Keys:
747: + -tao_admm_regularizer_coefficient        - regularizer constant (default 1.e-6)
748: . -tao_admm_spectral_penalty               - Constant for Augmented Lagrangian term (default 1.)
749: . -tao_admm_relaxation_parameter           - relaxation parameter for Z update (default 1.)
750: . -tao_admm_tolerance_update_factor        - ADMM dynamic tolerance update factor (default 1.e-12)
751: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
752: . -tao_admm_minimum_spectral_penalty       - Set ADMM minimum spectral penalty (default 0)
753: . -tao_admm_dual_update                    - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
754: - -tao_admm_regularizer_type               - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")

756:   Level: beginner

758: .seealso: TaoADMMSetMisfitHessianChangeStatus(), TaoADMMSetRegHessianChangeStatus(), TaoADMMGetSpectralPenalty(),
759:           TaoADMMGetMisfitSubsolver(), TaoADMMGetRegularizationSubsolver(), TaoADMMSetConstraintVectorRHS(),
760:           TaoADMMSetMinimumSpectralPenalty(), TaoADMMSetRegularizerCoefficient(),
761:           TaoADMMSetRegularizerConstraintJacobian(), TaoADMMSetMisfitConstraintJacobian(),
762:           TaoADMMSetMisfitObjectiveAndGradientRoutine(), TaoADMMSetMisfitHessianRoutine(),
763:           TaoADMMSetRegularizerObjectiveAndGradientRoutine(), TaoADMMSetRegularizerHessianRoutine(),
764:           TaoGetADMMParentTao(), TaoADMMGetDualVector(), TaoADMMSetRegularizerType(),
765:           TaoADMMGetRegularizerType(), TaoADMMSetUpdateType(), TaoADMMGetUpdateType()
766: M*/
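/* A minimal usage sketch for the lasso-type problem min_x 0.5*||Dx-b||^2 + lambda*||x||_1,
 * split as f(x) + g(z) with x - z = 0 by leaving both constraint Jacobians at their identity
 * defaults and using the built-in soft-threshold regularizer. The names x, D, b, H (holding
 * D^T D), work, lambda, AppCtx, UserMisfitObjGrad and UserMisfitHessian are hypothetical;
 * the callbacks are sketched earlier in this file. Error checking abbreviated.
 *
 *   AppCtx         user = {D, b, work};
 *   Tao            tao;
 *   PetscErrorCode ierr;
 *
 *   ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr);
 *   ierr = TaoSetType(tao,TAOADMM);CHKERRQ(ierr);
 *   ierr = TaoSetInitialVector(tao,x);CHKERRQ(ierr);
 *   ierr = TaoADMMSetMisfitObjectiveAndGradientRoutine(tao,UserMisfitObjGrad,&user);CHKERRQ(ierr);
 *   ierr = TaoADMMSetMisfitHessianRoutine(tao,H,H,UserMisfitHessian,&user);CHKERRQ(ierr);
 *   ierr = TaoADMMSetMisfitHessianChangeStatus(tao,PETSC_FALSE);CHKERRQ(ierr);
 *   ierr = TaoADMMSetRegularizerType(tao,TAO_ADMM_REGULARIZER_SOFT_THRESH);CHKERRQ(ierr);
 *   ierr = TaoADMMSetRegularizerCoefficient(tao,lambda);CHKERRQ(ierr);
 *   ierr = TaoADMMSetUpdateType(tao,TAO_ADMM_UPDATE_ADAPTIVE);CHKERRQ(ierr);
 *   ierr = TaoSetFromOptions(tao);CHKERRQ(ierr);
 *   ierr = TaoSolve(tao);CHKERRQ(ierr);
 *   ierr = TaoDestroy(&tao);CHKERRQ(ierr);
 *
 * The subsolvers pick up options with the "misfit_" and "reg_" prefixes assigned in TaoCreate_ADMM(). */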

768: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
769: {
770:   TAO_ADMM       *am;

774:   PetscNewLog(tao,&am);

776:   tao->ops->destroy        = TaoDestroy_ADMM;
777:   tao->ops->setup          = TaoSetUp_ADMM;
778:   tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
779:   tao->ops->view           = TaoView_ADMM;
780:   tao->ops->solve          = TaoSolve_ADMM;

782:   tao->data           = (void*)am;
783:   am->l1epsilon       = 1e-6;
784:   am->lambda          = 1e-4;
785:   am->mu              = 1.;
786:   am->muold           = 0.;
787:   am->mueps           = PETSC_MACHINE_EPSILON;
788:   am->mumin           = 0.;
789:   am->orthval         = 0.2;
790:   am->T               = 2;
791:   am->parent          = tao;
792:   am->update          = TAO_ADMM_UPDATE_BASIC;
793:   am->regswitch       = TAO_ADMM_REGULARIZER_SOFT_THRESH;
794:   am->tol             = PETSC_SMALL;
795:   am->const_norm      = 0;
796:   am->resnorm         = 0;
797:   am->dualres         = 0;
798:   am->ops->regobjgrad = NULL;
799:   am->ops->reghess    = NULL;
800:   am->gamma           = 1;
801:   am->regobjgradP     = NULL;
802:   am->reghessP        = NULL;
803:   am->gatol_admm      = 1e-8;
804:   am->catol_admm      = 0;
805:   am->Hxchange        = PETSC_TRUE;
806:   am->Hzchange        = PETSC_TRUE;
807:   am->Hzbool          = PETSC_TRUE;
808:   am->Hxbool          = PETSC_TRUE;

810:   TaoCreate(PetscObjectComm((PetscObject)tao),&am->subsolverX);
811:   TaoSetOptionsPrefix(am->subsolverX,"misfit_");
812:   PetscObjectIncrementTabLevel((PetscObject)am->subsolverX,(PetscObject)tao,1);
813:   TaoCreate(PetscObjectComm((PetscObject)tao),&am->subsolverZ);
814:   TaoSetOptionsPrefix(am->subsolverZ,"reg_");
815:   PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ,(PetscObject)tao,1);

817:   TaoSetType(am->subsolverX,TAONLS);
818:   TaoSetType(am->subsolverZ,TAONLS);
819:   PetscObjectCompose((PetscObject)am->subsolverX,"TaoGetADMMParentTao_ADMM", (PetscObject) tao);
820:   PetscObjectCompose((PetscObject)am->subsolverZ,"TaoGetADMMParentTao_ADMM", (PetscObject) tao);
821:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetRegularizerType_C",TaoADMMSetRegularizerType_ADMM);
822:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetRegularizerType_C",TaoADMMGetRegularizerType_ADMM);
823:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetUpdateType_C",TaoADMMSetUpdateType_ADMM);
824:   PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetUpdateType_C",TaoADMMGetUpdateType_ADMM);
825:   return(0);
826: }

828: /*@
829:   TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether the Hessian matrix of the misfit subsolver changes with respect to the input vector.

831:   Collective on Tao

833:   Input Parameters:
834: +  tao - the Tao solver context.
835: -  b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.

837:   Level: advanced

839: .seealso: TAOADMM

841: @*/
842: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
843: {
844:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

847:   am->Hxchange = b;
848:   return(0);
849: }

851: /*@
852:   TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether the Hessian matrix of the regularization subsolver changes with respect to the input vector.


855:   Collective on Tao

857:   Input Parameters:
858: +  tao - the Tao solver context
859: -  b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.

861:   Level: advanced

863: .seealso: TAOADMM

865: @*/
866: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
867: {
868:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

871:   am->Hzchange = b;
872:   return(0);
873: }

875: /*@
876:   TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value

878:   Collective on Tao

880:   Input Parameters:
881: +  tao - the Tao solver context
882: -  mu - spectral penalty

884:   Level: advanced

886: .seealso: TaoADMMSetMinimumSpectralPenalty(), TAOADMM
887: @*/
888: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
889: {
890:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

893:   am->mu = mu;
894:   return(0);
895: }

897: /*@
898:   TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value

900:   Collective on Tao

902:   Input Parameter:
903: .  tao - the Tao solver context

905:   Output Parameter:
906: .  mu - spectral penalty

908:   Level: advanced

910: .seealso: TaoADMMSetMinimumSpectralPenalty(), TaoADMMSetSpectralPenalty(), TAOADMM
911: @*/
912: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
913: {
914:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

919:   *mu = am->mu;
920:   return(0);
921: }

923: /*@
924:   TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside ADMM

926:   Collective on Tao

928:   Input Parameter:
929: .  tao - the Tao solver context

931:    Output Parameter:
932: .  misfit - the Tao subsolver context

934:   Level: advanced

936: .seealso: TAOADMM

938: @*/
939: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
940: {
941:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

944:   *misfit = am->subsolverX;
945:   return(0);
946: }

948: /*@
949:   TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside ADMM

951:   Collective on Tao

953:   Input Parameter:
954: .  tao - the Tao solver context

956:   Output Parameter:
957: .  reg - the Tao subsolver context

959:   Level: advanced

961: .seealso: TAOADMM

963: @*/
964: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
965: {
966:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

969:   *reg = am->subsolverZ;
970:   return(0);
971: }
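/* The subsolvers can be retrieved and configured directly. A short sketch (tao is the
 * outer TAOADMM solver; the "misfit_" and "reg_" option prefixes are the ones assigned
 * in TaoCreate_ADMM(), so the same can be done from the command line, e.g.
 * -misfit_tao_type nls):
 *
 *   Tao            misfit,reg;
 *   PetscErrorCode ierr;
 *
 *   ierr = TaoADMMGetMisfitSubsolver(tao,&misfit);CHKERRQ(ierr);
 *   ierr = TaoADMMGetRegularizationSubsolver(tao,&reg);CHKERRQ(ierr);
 *   ierr = TaoSetType(misfit,TAONLS);CHKERRQ(ierr);
 *   ierr = TaoSetType(reg,TAOSHELL);CHKERRQ(ierr);
 */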

973: /*@
974:   TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for ADMM

976:   Collective on Tao

978:   Input Parameters:
979: + tao - the Tao solver context
980: - c - RHS vector

982:   Level: advanced

984: .seealso: TAOADMM

986: @*/
987: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
988: {
989:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

992:   am->constraint = c;
993:   return(0);
994: }

996: /*@
997:   TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty

999:   Collective on Tao

1001:   Input Parameters:
1002: +  tao - the Tao solver context
1003: -  mu  - minimum spectral penalty value

1005:   Level: advanced

1007: .seealso: TaoADMMGetSpectralPenalty(), TAOADMM
1008: @*/
1009: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
1010: {
1011:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1014:   am->mumin= mu;
1015:   return(0);
1016: }

1018: /*@
1019:   TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for the L1-norm regularization case

1021:   Collective on Tao

1023:   Input Parameters:
1024: +  tao - the Tao solver context
1025: -  lambda - L1-norm regularizer coefficient

1027:   Level: advanced

1029: .seealso: TaoADMMSetMisfitConstraintJacobian(), TaoADMMSetRegularizerConstraintJacobian(), TAOADMM

1031: @*/
1032: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
1033: {
1034:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1037:   am->lambda = lambda;
1038:   return(0);
1039: }

1041: /*@C
1042:   TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the ADMM algorithm. Matrix A constrains the x (misfit) variable.

1044:   Collective on Tao

1046:   Input Parameters:
1047: + tao - the Tao solver context
1048: . J - user-created misfit constraint Jacobian matrix
1049: . Jpre - user-created misfit constraint Jacobian preconditioner matrix
1050: . func - function pointer for the misfit constraint Jacobian update function
1051: - ctx - user context for the misfit constraint Jacobian

1053:   Level: advanced

1055: .seealso: TaoADMMSetRegularizerCoefficient(), TaoADMMSetRegularizerConstraintJacobian(), TAOADMM

1057: @*/
1058: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1059: {
1060:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1065:   if (J) {
1066:     PetscValidHeaderSpecific(J,MAT_CLASSID,2);
1067:     PetscCheckSameComm(tao,1,J,2);
1068:   }
1069:   if (Jpre) {
1070:     PetscValidHeaderSpecific(Jpre,MAT_CLASSID,3);
1071:     PetscCheckSameComm(tao,1,Jpre,3);
1072:   }
1073:   if (ctx)  am->misfitjacobianP = ctx;
1074:   if (func) am->ops->misfitjac  = func;

1076:   if (J) {
1077:     PetscObjectReference((PetscObject)J);
1078:     MatDestroy(&am->JA);
1079:     am->JA = J;
1080:   }
1081:   if (Jpre) {
1082:     PetscObjectReference((PetscObject)Jpre);
1083:     MatDestroy(&am->JApre);
1084:     am->JApre = Jpre;
1085:   }
1086:   return(0);
1087: }
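/* A sketch of registering a constant misfit constraint Jacobian A (A and tao are assumed
 * to exist; the callback name is hypothetical). Since ADMM assumes the constraint is
 * linear, the update callback can leave the matrix untouched:
 *
 *   PetscErrorCode ConstantJacobianUpdate(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx)
 *   {
 *     return 0;
 *   }
 *
 *   ierr = TaoADMMSetMisfitConstraintJacobian(tao,A,A,ConstantJacobianUpdate,NULL);CHKERRQ(ierr);
 */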

1089: /*@C
1090:   TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the ADMM algorithm. Matrix B constrains the z variable.

1092:   Collective on Tao

1094:   Input Parameters:
1095: + tao - the Tao solver context
1096: . J - user-created regularizer constraint Jacobian matrix
1097: . Jpre - user-created regularizer Jacobian constraint preconditioner matrix
1098: . func - function pointer for the regularizer constraint Jacobian update function
1099: - ctx - user context for the regularizer Hessian

1101:   Level: advanced

1103: .seealso: TaoADMMSetRegularizerCoefficient(), TaoADMMSetMisfitConstraintJacobian(), TAOADMM

1105: @*/
1106: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1107: {
1108:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1113:   if (J) {
1114:     PetscValidHeaderSpecific(J,MAT_CLASSID,2);
1115:     PetscCheckSameComm(tao,1,J,2);
1116:   }
1117:   if (Jpre) {
1118:     PetscValidHeaderSpecific(Jpre,MAT_CLASSID,3);
1119:     PetscCheckSameComm(tao,1,Jpre,3);
1120:   }
1121:   if (ctx)  am->regjacobianP = ctx;
1122:   if (func) am->ops->regjac  = func;

1124:   if (J) {
1125:     PetscObjectReference((PetscObject)J);
1126:     MatDestroy(&am->JB);
1127:     am->JB = J;
1128:   }
1129:   if (Jpre) {
1130:     PetscObjectReference((PetscObject)Jpre);
1131:     MatDestroy(&am->JBpre);
1132:     am->JBpre = Jpre;
1133:   }
1134:   return(0);
1135: }

1137: /*@C
1138:    TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function

1140:    Collective on tao

1142:    Input Parameters:
1143: +    tao - the Tao context
1144: .    func - function pointer for the misfit value and gradient evaluation
1145: -    ctx - user context for the misfit

1147:    Level: advanced

1149: .seealso: TAOADMM

1151: @*/
1152: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
1153: {
1154:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1158:   am->misfitobjgradP     = ctx;
1159:   am->ops->misfitobjgrad = func;
1160:   return(0);
1161: }

1163: /*@C
1164:    TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1165:    function into the algorithm, to be used for subsolverX.

1167:    Collective on tao

1169:    Input Parameters:
1170:    + tao - the Tao context
1171:    . H - user-created matrix for the Hessian of the misfit term
1172:    . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1173:    . func - function pointer for the misfit Hessian evaluation
1174:    - ctx - user context for the misfit Hessian

1176:    Level: advanced

1178: .seealso: TAOADMM

1180: @*/
1181: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1182: {
1183:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1188:   if (H) {
1189:     PetscValidHeaderSpecific(H,MAT_CLASSID,2);
1190:     PetscCheckSameComm(tao,1,H,2);
1191:   }
1192:   if (Hpre) {
1193:     PetscValidHeaderSpecific(Hpre,MAT_CLASSID,3);
1194:     PetscCheckSameComm(tao,1,Hpre,3);
1195:   }
1196:   if (ctx) {
1197:     am->misfithessP = ctx;
1198:   }
1199:   if (func) {
1200:     am->ops->misfithess = func;
1201:   }
1202:   if (H) {
1203:     PetscObjectReference((PetscObject)H);
1204:     MatDestroy(&am->Hx);
1205:     am->Hx = H;
1206:   }
1207:   if (Hpre) {
1208:     PetscObjectReference((PetscObject)Hpre);
1209:     MatDestroy(&am->Hxpre);
1210:     am->Hxpre = Hpre;
1211:   }
1212:   return(0);
1213: }

1215: /*@C
1216:    TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function

1218:    Collective on tao

1220:    Input Parameters:
1221:    + tao - the Tao context
1222:    . func - function pointer for the regularizer value and gradient evaluation
1223:    - ctx - user context for the regularizer

1225:    Level: advanced

1227: .seealso: TAOADMM

1229: @*/
1230: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
1231: {
1232:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1236:   am->regobjgradP     = ctx;
1237:   am->ops->regobjgrad = func;
1238:   return(0);
1239: }
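/* A sketch of a user regularizer routine (names hypothetical) implementing the Tikhonov
 * term g(z) = (lambda/2)*||z||^2 with gradient lambda*z; pair it with
 * TaoADMMSetRegularizerType(tao,TAO_ADMM_REGULARIZER_USER):
 *
 *   PetscErrorCode UserRegObjGrad(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
 *   {
 *     PetscReal      lambda = *(PetscReal*)ptr, nrm;
 *     PetscErrorCode ierr;
 *
 *     ierr = VecNorm(z,NORM_2,&nrm);CHKERRQ(ierr);
 *     *f   = 0.5*lambda*nrm*nrm;
 *     ierr = VecCopy(z,g);CHKERRQ(ierr);
 *     ierr = VecScale(g,lambda);CHKERRQ(ierr);
 *     return 0;
 *   }
 *
 *   ierr = TaoADMMSetRegularizerObjectiveAndGradientRoutine(tao,UserRegObjGrad,&lambda);CHKERRQ(ierr);
 */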

1241: /*@C
1242:    TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1243:    function, to be used for subsolverZ.

1245:    Collective on tao

1247:    Input Parameters:
1248:    + tao - the Tao context
1249:    . H - user-created matrix for the Hessian of the regularization term
1250:    . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1251:    . func - function pointer for the regularizer Hessian evaluation
1252:    - ctx - user context for the regularizer Hessian

1254:    Level: advanced

1256: .seealso: TAOADMM

1258: @*/
1259: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1260: {
1261:   TAO_ADMM       *am = (TAO_ADMM*)tao->data;

1266:   if (H) {
1267:     PetscValidHeaderSpecific(H,MAT_CLASSID,2);
1268:     PetscCheckSameComm(tao,1,H,2);
1269:   }
1270:   if (Hpre) {
1271:     PetscValidHeaderSpecific(Hpre,MAT_CLASSID,3);
1272:     PetscCheckSameComm(tao,1,Hpre,3);
1273:   }
1274:   if (ctx) {
1275:     am->reghessP = ctx;
1276:   }
1277:   if (func) {
1278:     am->ops->reghess = func;
1279:   }
1280:   if (H) {
1281:     PetscObjectReference((PetscObject)H);
1282:     MatDestroy(&am->Hz);
1283:     am->Hz = H;
1284:   }
1285:   if (Hpre) {
1286:     PetscObjectReference((PetscObject)Hpre);
1287:     MatDestroy(&am->Hzpre);
1288:     am->Hzpre = Hpre;
1289:   }
1290:   return(0);
1291: }

1293: /*@
1294:    TaoGetADMMParentTao - Gets pointer to parent ADMM tao, used by inner subsolver.

1296:    Collective on tao

1298:    Input Parameter:
1299:    . tao - the Tao context

1301:    Output Parameter:
1302:    . admm_tao - the parent Tao context

1304:    Level: advanced

1306: .seealso: TAOADMM

1308: @*/
1309: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1310: {

1315:   PetscObjectQuery((PetscObject)tao,"TaoGetADMMParentTao_ADMM", (PetscObject*) admm_tao);
1316:   return(0);
1317: }
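/* A sketch of how code running under one of the subsolvers (for example a TAOSHELL
 * regularizer) can reach back to the outer ADMM solver; the helper name is hypothetical,
 * and mu and y would then be used in a custom z update:
 *
 *   PetscErrorCode QueryParentADMM(Tao subtao)
 *   {
 *     Tao            admm;
 *     PetscReal      mu;
 *     Vec            y;
 *     PetscErrorCode ierr;
 *
 *     ierr = TaoGetADMMParentTao(subtao,&admm);CHKERRQ(ierr);
 *     ierr = TaoADMMGetSpectralPenalty(admm,&mu);CHKERRQ(ierr);
 *     ierr = TaoADMMGetDualVector(admm,&y);CHKERRQ(ierr);
 *     return 0;
 *   }
 */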

1319: /*@
1320:   TaoADMMGetDualVector - Returns the dual vector associated with the current TAOADMM state

1322:   Not Collective

1324:   Input Parameter:
1325:   . tao - the Tao context

1327:   Output Parameter:
1328:   . Y - the current dual variable vector

1330:   Level: intermediate

1332: .seealso: TAOADMM

1334: @*/
1335: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1336: {
1337:   TAO_ADMM *am = (TAO_ADMM*)tao->data;

1341:   *Y = am->y;
1342:   return(0);
1343: }

1345: /*@
1346:   TaoADMMSetRegularizerType - Set the regularizer type for the ADMM routine

1348:   Not Collective

1350:   Input Parameters:
1351: + tao  - the Tao context
1352: - type - regularizer type

1354:   Options Database:
1355: .  -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh>

1357:   Level: intermediate

1359: .seealso: TaoADMMGetRegularizerType(), TaoADMMRegularizerType, TAOADMM
1360: @*/
1361: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1362: {

1368:   PetscTryMethod(tao,"TaoADMMSetRegularizerType_C",(Tao,TaoADMMRegularizerType),(tao,type));
1369:   return(0);
1370: }

1372: /*@
1373:    TaoADMMGetRegularizerType - Gets the type of regularizer routine for ADMM

1375:    Not Collective

1377:    Input Parameter:
1378: .  tao - the Tao context

1380:    Output Parameter:
1381: .  type - the type of regularizer

1383:   Options Database:
1384: .  -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh>

1386:    Level: intermediate

1388: .seealso: TaoADMMSetRegularizerType(), TaoADMMRegularizerType, TAOADMM
1389: @*/
1390: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1391: {

1396:   PetscUseMethod(tao,"TaoADMMGetRegularizerType_C",(Tao,TaoADMMRegularizerType*),(tao,type));
1397:   return(0);
1398: }

1400: /*@
1401:   TaoADMMSetUpdateType - Set the spectral penalty update routine for the ADMM routine

1403:   Not Collective

1405:   Input Parameters:
1406: + tao  - the Tao context
1407: - type - spectral parameter update type

1409:   Level: intermediate

1411: .seealso: TaoADMMGetUpdateType(), TaoADMMUpdateType, TAOADMM
1412: @*/
1413: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1414: {

1420:   PetscTryMethod(tao,"TaoADMMSetUpdateType_C",(Tao,TaoADMMUpdateType),(tao,type));
1421:   return(0);
1422: }
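/* Typical programmatic selections (tao is assumed to be a TAOADMM solver); the equivalent
 * options database keys are -tao_admm_dual_update and -tao_admm_regularizer_type, listed
 * in the TAOADMM manual block above:
 *
 *   ierr = TaoADMMSetUpdateType(tao,TAO_ADMM_UPDATE_ADAPTIVE);CHKERRQ(ierr);
 *   ierr = TaoADMMSetRegularizerType(tao,TAO_ADMM_REGULARIZER_USER);CHKERRQ(ierr);
 */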

1424: /*@
1425:    TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for ADMM

1427:    Not Collective

1429:    Input Parameter:
1430: .  tao - the Tao context

1432:    Output Parameter:
1433: .  type - the type of spectral penalty update routine

1435:    Level: intermediate

1437: .seealso: TaoADMMSetUpdateType(), TaoADMMUpdateType, TAOADMM
1438: @*/
1439: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1440: {

1445:   PetscUseMethod(tao,"TaoADMMGetUpdateType_C",(Tao,TaoADMMUpdateType*),(tao,type));
1446:   return(0);
1447: }