Actual source code: admm.c
1: #include <../src/tao/constrained/impls/admm/admm.h>
2: #include <petsctao.h>
3: #include <petsc/private/petscimpl.h>
5: /* Updates terminating criteria
6: *
7: * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
8: *
9: * 2. Updates dual residual, d_k
10: *
11: * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty|| */
13: static PetscBool cited = PETSC_FALSE;
14: static const char citation[] =
15: "@misc{xu2017adaptive,\n"
16: " title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
17: " author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
18: " year={2017},\n"
19: " eprint={1704.02712},\n"
20: " archivePrefix={arXiv},\n"
21: " primaryClass={cs.CV}\n"
22: "} \n";
24: const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER","REGULARIZER_SOFT_THRESH","TaoADMMRegularizerType","TAO_ADMM_",NULL};
25: const char *const TaoADMMUpdateTypes[] = {"UPDATE_BASIC","UPDATE_ADAPTIVE","UPDATE_ADAPTIVE_RELAXED","TaoADMMUpdateType","TAO_ADMM_",NULL};
26: const char *const TaoALMMTypes[] = {"CLASSIC","PHR","TaoALMMType","TAO_ALMM_",NULL};
28: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
29: {
30: TAO_ADMM *am = (TAO_ADMM*)tao->data;
31: PetscReal Axnorm,Bznorm,ATynorm,temp;
32: Vec tempJR,tempL;
33: Tao mis;
35: mis = am->subsolverX;
36: tempJR = am->workJacobianRight;
37: tempL = am->workLeft;
38: /* ATy */
39: TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre);
40: MatMultTranspose(mis->jacobian_equality,am->y,tempJR);
41: VecNorm(tempJR,NORM_2,&ATynorm);
42: /* dualres = mu * ||AT(Bz-Bzold)||_2 */
43: VecWAXPY(tempJR,-1.,am->Bzold,am->Bz);
44: MatMultTranspose(mis->jacobian_equality,tempJR,tempL);
45: VecNorm(tempL,NORM_2,&am->dualres);
46: am->dualres *= am->mu;
48: /* ||Ax||_2, ||Bz||_2 */
49: VecNorm(am->Ax,NORM_2,&Axnorm);
50: VecNorm(am->Bz,NORM_2,&Bznorm);
52: /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||} *
53: * Set gatol to be gatol_admm * ||A^Ty|| *
54: * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
55: temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm,am->const_norm));
56: TaoSetConstraintTolerances(tao,temp,PETSC_DEFAULT);
57: TaoSetTolerances(tao, am->gatol_admm*ATynorm, PETSC_DEFAULT,PETSC_DEFAULT);
58: return 0;
59: }
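/* Note: catol_admm and gatol_admm are seeded from the outer Tao tolerances in TaoSetUp_ADMM,
 * so a typical way to influence this adaptive update is to set those tolerances before the
 * solve; an illustrative sketch (the 1.e-8 values are placeholders, not defaults):
 *
 *   TaoSetTolerances(tao,1.e-8,PETSC_DEFAULT,PETSC_DEFAULT);
 *   TaoSetConstraintTolerances(tao,1.e-8,PETSC_DEFAULT);
 */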
61: /* Penalty Update for Adaptive ADMM. */
62: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
63: {
64: TAO_ADMM *am = (TAO_ADMM*)tao->data;
65: PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
66: PetscBool hflag, gflag;
67: Vec tempJR,tempJR2;
69: tempJR = am->workJacobianRight;
70: tempJR2 = am->workJacobianRight2;
71: hflag = PETSC_FALSE;
72: gflag = PETSC_FALSE;
73: a_k = -1;
74: b_k = -1;
76: VecWAXPY(tempJR,-1.,am->Axold,am->Ax);
77: VecWAXPY(tempJR2,-1.,am->yhatold,am->yhat);
78: VecNorm(tempJR,NORM_2,&Axdiff_norm);
79: VecNorm(tempJR2,NORM_2,&yhatdiff_norm);
80: VecDot(tempJR,tempJR2,&Axyhat);
82: VecWAXPY(tempJR,-1.,am->Bz0,am->Bz);
83: VecWAXPY(tempJR2,-1.,am->y,am->y0);
84: VecNorm(tempJR,NORM_2,&Bzdiff_norm);
85: VecNorm(tempJR2,NORM_2,&ydiff_norm);
86: VecDot(tempJR,tempJR2,&Bzy);
88: if (Axyhat > am->orthval*Axdiff_norm*yhatdiff_norm + am->mueps) {
89: hflag = PETSC_TRUE;
90: a_sd = PetscSqr(yhatdiff_norm)/Axyhat; /* alphaSD */
91: a_mg = Axyhat/PetscSqr(Axdiff_norm); /* alphaMG */
92: a_k = (a_mg/a_sd) > 0.5 ? a_mg : a_sd - 0.5*a_mg;
93: }
94: if (Bzy > am->orthval*Bzdiff_norm*ydiff_norm + am->mueps) {
95: gflag = PETSC_TRUE;
96: b_sd = PetscSqr(ydiff_norm)/Bzy; /* betaSD */
97: b_mg = Bzy/PetscSqr(Bzdiff_norm); /* betaMG */
98: b_k = (b_mg/b_sd) > 0.5 ? b_mg : b_sd - 0.5*b_mg;
99: }
100: am->muold = am->mu;
101: if (gflag && hflag) {
102: am->mu = PetscSqrtReal(a_k*b_k);
103: } else if (hflag) {
104: am->mu = a_k;
105: } else if (gflag) {
106: am->mu = b_k;
107: }
108: if (am->mu > am->muold) {
109: am->mu = am->muold;
110: }
111: if (am->mu < am->mumin) {
112: am->mu = am->mumin;
113: }
114: return 0;
115: }
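/* In the notation of AdaptiveADMMPenaltyUpdate above, the spectral estimates are
 *   a_sd = ||yhat-yhatold||^2 / <Ax-Axold, yhat-yhatold>   (steepest-descent estimate)
 *   a_mg = <Ax-Axold, yhat-yhatold> / ||Ax-Axold||^2       (minimum-gradient estimate)
 * with a_k = a_mg when a_mg/a_sd > 1/2 and a_k = a_sd - a_mg/2 otherwise, and analogously
 * b_k from the corresponding Bz and y differences. Each estimate is used only if its
 * correlation test <d1,d2> > orthval*||d1||*||d2|| + mueps passes, and the new penalty
 * mu = sqrt(a_k*b_k) (or a_k/b_k alone) is safeguarded so that it never increases and
 * never drops below mumin. */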
117: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
118: {
119: TAO_ADMM *am = (TAO_ADMM*)tao->data;
121: am->regswitch = type;
122: return 0;
123: }
125: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
126: {
127: TAO_ADMM *am = (TAO_ADMM*)tao->data;
129: *type = am->regswitch;
130: return 0;
131: }
133: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
134: {
135: TAO_ADMM *am = (TAO_ADMM*)tao->data;
137: am->update = type;
138: return 0;
139: }
141: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
142: {
143: TAO_ADMM *am = (TAO_ADMM*)tao->data;
145: *type = am->update;
146: return 0;
147: }
149: /* This routine updates the Jacobians with the new x,z vectors,
150: * then updates the Ax and Bz vectors, and finally computes the updated residual vector */
151: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
152: {
153: TAO_ADMM *am = (TAO_ADMM*)tao->data;
154: Tao mis,reg;
156: mis = am->subsolverX;
157: reg = am->subsolverZ;
158: TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre);
159: MatMult(mis->jacobian_equality,x,Ax);
160: TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre);
161: MatMult(reg->jacobian_equality,z,Bz);
163: VecWAXPY(residual,1.,Bz,Ax);
164: if (am->constraint != NULL) {
165: VecAXPY(residual,-1.,am->constraint);
166: }
167: return 0;
168: }
170: /* Adds the augmented Lagrangian terms to the given routines.
171: * For subsolverX, the routine needs to be a combined ObjectiveAndGradient routine.
172: * Separate Objective and Gradient routines are not supported. */
173: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
174: {
175: Tao parent = (Tao)ptr;
176: TAO_ADMM *am = (TAO_ADMM*)parent->data;
177: PetscReal temp,temp2;
178: Vec tempJR;
180: tempJR = am->workJacobianRight;
181: ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
182: (*am->ops->misfitobjgrad)(am->subsolverX,x,f,g,am->misfitobjgradP);
184: am->last_misfit_val = *f;
185: /* Objective: add yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
186: VecTDot(am->residual,am->y,&temp);
187: VecTDot(am->residual,am->residual,&temp2);
188: *f += temp + (am->mu/2)*temp2;
190: /* Gradient: add mu*AT(Ax+Bz-c) + ATy */
191: MatMultTranspose(tao->jacobian_equality,am->residual,tempJR);
192: VecAXPY(g,am->mu,tempJR);
193: MatMultTranspose(tao->jacobian_equality,am->y,tempJR);
194: VecAXPY(g,1.,tempJR);
195: return 0;
196: }
198: /* Adds the augmented Lagrangian terms to the given routines.
199: * For subsolverZ, the routine needs to be a combined ObjectiveAndGradient routine.
200: * Separate Objective and Gradient routines are not supported. */
201: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
202: {
203: Tao parent = (Tao)ptr;
204: TAO_ADMM *am = (TAO_ADMM*)parent->data;
205: PetscReal temp,temp2;
206: Vec tempJR;
208: tempJR = am->workJacobianRight;
209: ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual);
210: (*am->ops->regobjgrad)(am->subsolverZ,z,f,g,am->regobjgradP);
211: am->last_reg_val= *f;
212: /* Objective: add yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
213: VecTDot(am->residual,am->y,&temp);
214: VecTDot(am->residual,am->residual,&temp2);
215: *f += temp + (am->mu/2)*temp2;
217: /* Gradient: add mu*BT(Ax+Bz-c) + BTy */
218: MatMultTranspose(am->subsolverZ->jacobian_equality,am->residual,tempJR);
219: VecAXPY(g,am->mu,tempJR);
220: MatMultTranspose(am->subsolverZ->jacobian_equality,am->y,tempJR);
221: VecAXPY(g,1.,tempJR);
222: return 0;
223: }
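/* Together, SubObjGradUpdate and RegObjGradUpdate evaluate the augmented Lagrangian
 *   L_mu(x,z;y) = f(x) + g(z) + y^T(Ax+Bz-c) + (mu/2)*||Ax+Bz-c||_2^2
 * split across the two subsolvers: each wrapper calls the user's objective/gradient routine
 * and then adds the multiplier and penalty terms, plus their gradients with respect to its
 * own variable (x or z). */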
225: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
226: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
227: {
228: TAO_ADMM *am = (TAO_ADMM*)tao->data;
229: PetscInt N;
231: VecGetSize(am->workLeft,&N);
232: VecPointwiseMult(am->workLeft,x,x);
233: VecShift(am->workLeft,am->l1epsilon*am->l1epsilon);
234: VecSqrtAbs(am->workLeft);
235: VecSum(am->workLeft,norm);
236: *norm += N*am->l1epsilon;
237: *norm *= am->lambda;
238: return 0;
239: }
241: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
242: {
243: TAO_ADMM *am = (TAO_ADMM*)ptr;
245: switch (am->update) {
246: case (TAO_ADMM_UPDATE_BASIC):
247: break;
248: case (TAO_ADMM_UPDATE_ADAPTIVE):
249: case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
250: if (H && (am->muold != am->mu)) {
251: if (!Identity) {
252: MatAXPY(H,am->mu-am->muold,Constraint,DIFFERENT_NONZERO_PATTERN);
253: } else {
254: MatShift(H,am->mu-am->muold);
255: }
256: }
257: break;
258: }
259: return 0;
260: }
262: /* Updates Hessian - adds second derivative of augmented Lagrangian
263: * H \gets H + \rho*ATA
264: * Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus no-op
265: * For ADAPTIVE and ADAPTIVE_RELAXED,
266: * H \gets H + (\rho-\rhoold)*ATA
267: * Here, we assume that A is a linear constraint, i.e., it doesn't change.
268: * Thus, for both ADAPTIVE and ADAPTIVE_RELAXED, the ATA matrix is pre-set (except for A=I, the NULL case); see TaoSetUp_ADMM */
269: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
270: {
271: Tao parent = (Tao)ptr;
272: TAO_ADMM *am = (TAO_ADMM*)parent->data;
274: if (am->Hxchange) {
275: /* Case where Hessian gets updated with respect to x vector input. */
276: (*am->ops->misfithess)(am->subsolverX,x,H,Hpre,am->misfithessP);
277: ADMMInternalHessianUpdate(am->subsolverX->hessian,am->ATA,am->xJI,am);
278: } else if (am->Hxbool) {
279: /* Hessian doesn't get updated. H(x) = c */
280: /* Update the Lagrangian term only once per TaoSolve call */
281: ADMMInternalHessianUpdate(am->subsolverX->hessian,am->ATA,am->xJI,am);
282: am->Hxbool = PETSC_FALSE;
283: }
284: return 0;
285: }
287: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
288: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
289: {
290: Tao parent = (Tao)ptr;
291: TAO_ADMM *am = (TAO_ADMM*)parent->data;
294: if (am->Hzchange) {
295: /* Case where Hessian gets updated with respect to the z vector input. */
296: (*am->ops->reghess)(am->subsolverZ,z,H,Hpre,am->reghessP);
297: ADMMInternalHessianUpdate(am->subsolverZ->hessian,am->BTB,am->zJI,am);
298: } else if (am->Hzbool) {
299: /* Hessian doesn't get updated. H(z) = c */
300: /* Update the Lagrangian term only once per TaoSolve call */
301: ADMMInternalHessianUpdate(am->subsolverZ->hessian,am->BTB,am->zJI,am);
302: am->Hzbool = PETSC_FALSE;
303: }
304: return 0;
305: }
307: /* Shell Matrix routine for A matrix.
308: * This gets used when the user passes NULL for
309: * TaoSetJacobianEqualityRoutine(tao, NULL,NULL, ...)
310: * Essentially sets A=I*/
311: static PetscErrorCode JacobianIdentity(Mat mat,Vec in,Vec out)
312: {
313: VecCopy(in,out);
314: return 0;
315: }
317: /* Shell Matrix routine for B matrix.
318: * This gets used when the user passes NULL for
319: * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
320: * Sets B=-I */
321: static PetscErrorCode JacobianIdentityB(Mat mat,Vec in,Vec out)
322: {
323: VecCopy(in,out);
324: VecScale(out,-1.);
325: return 0;
326: }
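/* With both Jacobians defaulted (A=I, B=-I) the constraint Ax+Bz=c reduces to x-z=c,
 * i.e. x=z when no RHS vector is set, which is the variable-splitting form assumed by
 * the soft-threshold regularizer. */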
328: /* Solve min_{x,z} f(x) + g(z) s.t. Ax + Bz = c */
329: static PetscErrorCode TaoSolve_ADMM(Tao tao)
330: {
331: TAO_ADMM *am = (TAO_ADMM*)tao->data;
332: PetscInt N;
333: PetscReal reg_func;
334: PetscBool is_reg_shell;
335: Vec tempL;
337: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
340: if (am->constraint != NULL) {
341: VecNorm(am->constraint,NORM_2,&am->const_norm);
342: }
343: }
344: tempL = am->workLeft;
345: VecGetSize(tempL,&N);
347: if (am->Hx && am->ops->misfithess) {
348: TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao);
349: }
351: if (!am->zJI) {
352: /* Currently, B is assumed to be linear, i.e., not getting updated */
353: MatTransposeMatMult(am->JB,am->JB,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&(am->BTB));
354: }
355: if (!am->xJI) {
356: /* Currently, A is assumed to be linear, i.e., not getting updated */
357: MatTransposeMatMult(am->subsolverX->jacobian_equality,am->subsolverX->jacobian_equality,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&(am->ATA));
358: }
360: is_reg_shell = PETSC_FALSE;
362: PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell);
364: if (!is_reg_shell) {
365: switch (am->regswitch) {
366: case (TAO_ADMM_REGULARIZER_USER):
368: break;
369: case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
370: /* Soft Threshold. */
371: break;
372: }
373: if (am->ops->regobjgrad) {
374: TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao);
375: }
376: if (am->Hz && am->ops->reghess) {
377: TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao);
378: }
379: }
381: switch (am->update) {
382: case TAO_ADMM_UPDATE_BASIC:
383: if (am->subsolverX->hessian) {
384: /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty.
385: * Here, when A is explicitly set (i.e., !am->xJI), add mu*ATA to the Hessian; otherwise shift by mu */
386: if (!am->xJI) {
387: MatAXPY(am->subsolverX->hessian,am->mu,am->ATA,DIFFERENT_NONZERO_PATTERN);
388: } else {
389: MatShift(am->subsolverX->hessian,am->mu);
390: }
391: }
392: if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
393: if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
394: MatAXPY(am->subsolverZ->hessian,am->mu,am->BTB,DIFFERENT_NONZERO_PATTERN);
395: } else {
396: MatShift(am->subsolverZ->hessian,am->mu);
397: }
398: }
399: break;
400: case TAO_ADMM_UPDATE_ADAPTIVE:
401: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
402: break;
403: }
405: PetscCitationsRegister(citation,&cited);
406: tao->reason = TAO_CONTINUE_ITERATING;
408: while (tao->reason == TAO_CONTINUE_ITERATING) {
409: if (tao->ops->update) {
410: (*tao->ops->update)(tao, tao->niter, tao->user_update);
411: }
412: VecCopy(am->Bz, am->Bzold);
414: /* x update */
415: TaoSolve(am->subsolverX);
416: TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre);
417: MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution,am->Ax);
419: am->Hxbool = PETSC_TRUE;
421: /* z update */
422: switch (am->regswitch) {
423: case TAO_ADMM_REGULARIZER_USER:
424: TaoSolve(am->subsolverZ);
425: break;
426: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
427: /* L1 assumes the A,B Jacobians are nxn identity matrices */
428: VecWAXPY(am->workJacobianRight,1/am->mu,am->y,am->Ax);
429: TaoSoftThreshold(am->workJacobianRight,-am->lambda/am->mu,am->lambda/am->mu,am->subsolverZ->solution);
430: break;
431: }
432: am->Hzbool = PETSC_TRUE;
433: /* Returns Ax + Bz - c with updated Ax,Bz vectors */
434: ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
435: /* Dual variable update, y = yold + mu*(Ax+Bz-c) */
436: VecWAXPY(am->y, am->mu, am->residual, am->yold);
438: /* stopping tolerance update */
439: TaoADMMToleranceUpdate(tao);
441: /* Updating Spectral Penalty */
442: switch (am->update) {
443: case TAO_ADMM_UPDATE_BASIC:
444: am->muold = am->mu;
445: break;
446: case TAO_ADMM_UPDATE_ADAPTIVE:
447: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
448: if (tao->niter == 0) {
449: VecCopy(am->y, am->y0);
450: VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
451: if (am->constraint) {
452: VecAXPY(am->residual, -1., am->constraint);
453: }
454: VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold);
455: VecCopy(am->Ax, am->Axold);
456: VecCopy(am->Bz, am->Bz0);
457: am->muold = am->mu;
458: } else if (tao->niter % am->T == 1) {
459: /* we computed Bzold in a previous iteration, and we computed Ax above */
460: VecWAXPY(am->residual, 1., am->Ax, am->Bzold);
461: if (am->constraint) {
462: VecAXPY(am->residual, -1., am->constraint);
463: }
464: VecWAXPY(am->yhat, -am->mu, am->residual, am->yold);
465: AdaptiveADMMPenaltyUpdate(tao);
466: VecCopy(am->Ax, am->Axold);
467: VecCopy(am->Bz, am->Bz0);
468: VecCopy(am->yhat, am->yhatold);
469: VecCopy(am->y, am->y0);
470: } else {
471: am->muold = am->mu;
472: }
473: break;
474: default:
475: break;
476: }
477: tao->niter++;
479: /* Calculate the original function values. The misfit part was stored in SubObjGradUpdate during the subsolverX solve */
480: switch (am->regswitch) {
481: case TAO_ADMM_REGULARIZER_USER:
482: if (is_reg_shell) {
483: ADMML1EpsilonNorm(tao,am->subsolverZ->solution,am->l1epsilon,&reg_func);
484: } else {
485: (*am->ops->regobjgrad)(am->subsolverZ,am->subsolverX->solution,&reg_func,tempL,am->regobjgradP);
486: }
487: break;
488: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
489: ADMML1EpsilonNorm(tao,am->subsolverZ->solution,am->l1epsilon,&reg_func);
490: break;
491: }
492: VecCopy(am->y,am->yold);
493: ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual);
494: VecNorm(am->residual,NORM_2,&am->resnorm);
495: TaoLogConvergenceHistory(tao,am->last_misfit_val + reg_func,am->dualres,am->resnorm,tao->ksp_its);
497: TaoMonitor(tao,tao->niter,am->last_misfit_val + reg_func,am->dualres,am->resnorm,1.0);
498: (*tao->ops->convergencetest)(tao,tao->cnvP);
499: }
500: /* Update vectors */
501: VecCopy(am->subsolverX->solution,tao->solution);
502: VecCopy(am->subsolverX->gradient,tao->gradient);
503: PetscObjectCompose((PetscObject)am->subsolverX,"TaoGetADMMParentTao_ADMM", NULL);
504: PetscObjectCompose((PetscObject)am->subsolverZ,"TaoGetADMMParentTao_ADMM", NULL);
505: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetRegularizerType_C",NULL);
506: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetRegularizerType_C",NULL);
507: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetUpdateType_C",NULL);
508: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetUpdateType_C",NULL);
509: return 0;
510: }
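/* Summary of one iteration of the loop above: (1) x-update via TaoSolve on subsolverX of the
 * augmented Lagrangian in x, (2) z-update via TaoSolve on subsolverZ or via soft-thresholding,
 * (3) dual update y = yold + mu*(Ax+Bz-c), followed by the adaptive tolerance update and, if
 * requested, the adaptive spectral penalty update. */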
512: static PetscErrorCode TaoSetFromOptions_ADMM(PetscOptionItems *PetscOptionsObject,Tao tao)
513: {
514: TAO_ADMM *am = (TAO_ADMM*)tao->data;
517: PetscOptionsHead(PetscOptionsObject,"ADMM problem that solves f(x) in the form f(x) + g(z) subject to x - z = 0. Norms 1 and 2 are supported. Different subsolver routines can be selected. ");
518: PetscOptionsReal("-tao_admm_regularizer_coefficient","regularizer constant","",am->lambda,&am->lambda,NULL);
519: PetscOptionsReal("-tao_admm_spectral_penalty","Constant for Augmented Lagrangian term.","",am->mu,&am->mu,NULL);
520: PetscOptionsReal("-tao_admm_relaxation_parameter","x relaxation parameter for Z update.","",am->gamma,&am->gamma,NULL);
521: PetscOptionsReal("-tao_admm_tolerance_update_factor","ADMM dynamic tolerance update factor.","",am->tol,&am->tol,NULL);
522: PetscOptionsReal("-tao_admm_spectral_penalty_update_factor","ADMM spectral penalty update curvature safeguard value.","",am->orthval,&am->orthval,NULL);
523: PetscOptionsReal("-tao_admm_minimum_spectral_penalty","Set ADMM minimum spectral penalty.","",am->mumin,&am->mumin,NULL);
524: PetscOptionsEnum("-tao_admm_dual_update","Lagrangian dual update policy","TaoADMMUpdateType",
525: TaoADMMUpdateTypes,(PetscEnum)am->update,(PetscEnum*)&am->update,NULL);
526: PetscOptionsEnum("-tao_admm_regularizer_type","ADMM regularizer update rule","TaoADMMRegularizerType",
527: TaoADMMRegularizerTypes,(PetscEnum)am->regswitch,(PetscEnum*)&am->regswitch,NULL);
528: PetscOptionsTail();
529: TaoSetFromOptions(am->subsolverX);
530: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
531: TaoSetFromOptions(am->subsolverZ);
532: }
533: return 0;
534: }
536: static PetscErrorCode TaoView_ADMM(Tao tao,PetscViewer viewer)
537: {
538: TAO_ADMM *am = (TAO_ADMM*)tao->data;
540: PetscViewerASCIIPushTab(viewer);
541: TaoView(am->subsolverX,viewer);
542: TaoView(am->subsolverZ,viewer);
543: PetscViewerASCIIPopTab(viewer);
544: return 0;
545: }
547: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
548: {
549: TAO_ADMM *am = (TAO_ADMM*)tao->data;
550: PetscInt n,N,M;
552: VecGetLocalSize(tao->solution,&n);
553: VecGetSize(tao->solution,&N);
554: /* If the Jacobian is given as NULL, it means the Jacobian is the identity matrix with the size of the solution vector */
555: if (!am->JB) {
556: am->zJI = PETSC_TRUE;
557: MatCreateShell(PetscObjectComm((PetscObject)tao),n,n,PETSC_DETERMINE,PETSC_DETERMINE,NULL,&am->JB);
558: MatShellSetOperation(am->JB,MATOP_MULT,(void (*)(void))JacobianIdentityB);
559: MatShellSetOperation(am->JB,MATOP_MULT_TRANSPOSE,(void (*)(void))JacobianIdentityB);
560: MatShellSetOperation(am->JB,MATOP_TRANSPOSE_MAT_MULT,(void (*)(void))JacobianIdentityB);
561: am->JBpre = am->JB;
562: }
563: if (!am->JA) {
564: am->xJI = PETSC_TRUE;
565: MatCreateShell(PetscObjectComm((PetscObject)tao),n,n,PETSC_DETERMINE,PETSC_DETERMINE,NULL,&am->JA);
566: MatShellSetOperation(am->JA,MATOP_MULT,(void (*)(void))JacobianIdentity);
567: MatShellSetOperation(am->JA,MATOP_MULT_TRANSPOSE,(void (*)(void))JacobianIdentity);
568: MatShellSetOperation(am->JA,MATOP_TRANSPOSE_MAT_MULT,(void (*)(void))JacobianIdentity);
569: am->JApre = am->JA;
570: }
571: MatCreateVecs(am->JA,NULL,&am->Ax);
572: if (!tao->gradient) {
573: VecDuplicate(tao->solution,&tao->gradient);
574: }
575: TaoSetSolution(am->subsolverX, tao->solution);
576: if (!am->z) {
577: VecDuplicate(tao->solution,&am->z);
578: VecSet(am->z,0.0);
579: }
580: TaoSetSolution(am->subsolverZ, am->z);
581: if (!am->workLeft) {
582: VecDuplicate(tao->solution,&am->workLeft);
583: }
584: if (!am->Axold) {
585: VecDuplicate(am->Ax,&am->Axold);
586: }
587: if (!am->workJacobianRight) {
588: VecDuplicate(am->Ax,&am->workJacobianRight);
589: }
590: if (!am->workJacobianRight2) {
591: VecDuplicate(am->Ax,&am->workJacobianRight2);
592: }
593: if (!am->Bz) {
594: VecDuplicate(am->Ax,&am->Bz);
595: }
596: if (!am->Bzold) {
597: VecDuplicate(am->Ax,&am->Bzold);
598: }
599: if (!am->Bz0) {
600: VecDuplicate(am->Ax,&am->Bz0);
601: }
602: if (!am->y) {
603: VecDuplicate(am->Ax,&am->y);
604: VecSet(am->y,0.0);
605: }
606: if (!am->yold) {
607: VecDuplicate(am->Ax,&am->yold);
608: VecSet(am->yold,0.0);
609: }
610: if (!am->y0) {
611: VecDuplicate(am->Ax,&am->y0);
612: VecSet(am->y0,0.0);
613: }
614: if (!am->yhat) {
615: VecDuplicate(am->Ax,&am->yhat);
616: VecSet(am->yhat,0.0);
617: }
618: if (!am->yhatold) {
619: VecDuplicate(am->Ax,&am->yhatold);
620: VecSet(am->yhatold,0.0);
621: }
622: if (!am->residual) {
623: VecDuplicate(am->Ax,&am->residual);
624: VecSet(am->residual,0.0);
625: }
626: if (!am->constraint) {
627: am->constraint = NULL;
628: } else {
629: VecGetSize(am->constraint,&M);
631: }
633: /* Save changed tao tolerance for adaptive tolerance */
634: if (tao->gatol_changed) {
635: am->gatol_admm = tao->gatol;
636: }
637: if (tao->catol_changed) {
638: am->catol_admm = tao->catol;
639: }
641: /* Update spectral and dual elements to the X subsolver */
642: TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao);
643: TaoSetJacobianEqualityRoutine(am->subsolverX,am->JA,am->JApre, am->ops->misfitjac, am->misfitjacobianP);
644: TaoSetJacobianEqualityRoutine(am->subsolverZ,am->JB,am->JBpre, am->ops->regjac, am->regjacobianP);
645: return 0;
646: }
648: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
649: {
650: TAO_ADMM *am = (TAO_ADMM*)tao->data;
652: VecDestroy(&am->z);
653: VecDestroy(&am->Ax);
654: VecDestroy(&am->Axold);
655: VecDestroy(&am->Bz);
656: VecDestroy(&am->Bzold);
657: VecDestroy(&am->Bz0);
658: VecDestroy(&am->residual);
659: VecDestroy(&am->y);
660: VecDestroy(&am->yold);
661: VecDestroy(&am->y0);
662: VecDestroy(&am->yhat);
663: VecDestroy(&am->yhatold);
664: VecDestroy(&am->workLeft);
665: VecDestroy(&am->workJacobianRight);
666: VecDestroy(&am->workJacobianRight2);
668: MatDestroy(&am->JA);
669: MatDestroy(&am->JB);
670: if (!am->xJI) {
671: MatDestroy(&am->JApre);
672: }
673: if (!am->zJI) {
674: MatDestroy(&am->JBpre);
675: }
676: if (am->Hx) {
677: MatDestroy(&am->Hx);
678: MatDestroy(&am->Hxpre);
679: }
680: if (am->Hz) {
681: MatDestroy(&am->Hz);
682: MatDestroy(&am->Hzpre);
683: }
684: MatDestroy(&am->ATA);
685: MatDestroy(&am->BTB);
686: TaoDestroy(&am->subsolverX);
687: TaoDestroy(&am->subsolverZ);
688: am->parent = NULL;
689: PetscFree(tao->data);
690: return 0;
691: }
693: /*MC
695: TAOADMM - Alternating direction method of multipliers for solving problems with linear
696: constraints, in the form min_{x,z} f(x) + g(z) s.t. Ax+Bz=c.
697: This algorithm employs two sub-Tao solvers, whose types can be specified
698: by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a Hessian routine, for both subsolvers.
699: Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
700: TaoADMMSet{Misfit,Regularizer}HessianChangeStatus().
701: The second subsolver supports TAOSHELL. It should be noted that the L1-norm is used for the objective value for the TAOSHELL type.
702: There is an option to set the regularizer type, and currently soft-threshold is implemented. For the spectral penalty update,
703: there are currently a basic option and an adaptive option.
704: The constraint is Ax+Bz=c, and A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian().
705: c can be set with TaoADMMSetConstraintVectorRHS().
706: The user can also provide a regularizer weight for the second subsolver.
708: References:
709: . * - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom
710: "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation"
711: The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017.
713: Options Database Keys:
714: + -tao_admm_regularizer_coefficient - regularizer constant (default 1.e-6)
715: . -tao_admm_spectral_penalty - Constant for Augmented Lagrangian term (default 1.)
716: . -tao_admm_relaxation_parameter - relaxation parameter for Z update (default 1.)
717: . -tao_admm_tolerance_update_factor - ADMM dynamic tolerance update factor (default 1.e-12)
718: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
719: . -tao_admm_minimum_spectral_penalty - Set ADMM minimum spectral penalty (default 0)
720: . -tao_admm_dual_update - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
721: - -tao_admm_regularizer_type - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")
723: Level: beginner
725: .seealso: TaoADMMSetMisfitHessianChangeStatus(), TaoADMMSetRegHessianChangeStatus(), TaoADMMGetSpectralPenalty(),
726: TaoADMMGetMisfitSubsolver(), TaoADMMGetRegularizationSubsolver(), TaoADMMSetConstraintVectorRHS(),
727: TaoADMMSetMinimumSpectralPenalty(), TaoADMMSetRegularizerCoefficient(),
728: TaoADMMSetRegularizerConstraintJacobian(), TaoADMMSetMisfitConstraintJacobian(),
729: TaoADMMSetMisfitObjectiveAndGradientRoutine(), TaoADMMSetMisfitHessianRoutine(),
730: TaoADMMSetRegularizerObjectiveAndGradientRoutine(), TaoADMMSetRegularizerHessianRoutine(),
731: TaoGetADMMParentTao(), TaoADMMGetDualVector(), TaoADMMSetRegularizerType(),
732: TaoADMMGetRegularizerType(), TaoADMMSetUpdateType(), TaoADMMGetUpdateType()
733: M*/
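/* A minimal usage sketch for the soft-threshold case (illustrative only: x, H, UserMisfitObjGrad,
 * UserMisfitHess, and userctx are hypothetical user-provided objects, not defined in this file):
 *
 *   Tao tao;
 *   TaoCreate(PETSC_COMM_WORLD,&tao);
 *   TaoSetType(tao,TAOADMM);
 *   TaoSetSolution(tao,x);
 *   TaoADMMSetMisfitObjectiveAndGradientRoutine(tao,UserMisfitObjGrad,&userctx);
 *   TaoADMMSetMisfitHessianRoutine(tao,H,H,UserMisfitHess,&userctx);
 *   TaoADMMSetRegularizerType(tao,TAO_ADMM_REGULARIZER_SOFT_THRESH);
 *   TaoADMMSetRegularizerCoefficient(tao,1.e-3);
 *   TaoSetFromOptions(tao);
 *   TaoSolve(tao);
 *   TaoDestroy(&tao);
 */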
735: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
736: {
737: TAO_ADMM *am;
739: PetscNewLog(tao,&am);
741: tao->ops->destroy = TaoDestroy_ADMM;
742: tao->ops->setup = TaoSetUp_ADMM;
743: tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
744: tao->ops->view = TaoView_ADMM;
745: tao->ops->solve = TaoSolve_ADMM;
747: tao->data = (void*)am;
748: am->l1epsilon = 1e-6;
749: am->lambda = 1e-4;
750: am->mu = 1.;
751: am->muold = 0.;
752: am->mueps = PETSC_MACHINE_EPSILON;
753: am->mumin = 0.;
754: am->orthval = 0.2;
755: am->T = 2;
756: am->parent = tao;
757: am->update = TAO_ADMM_UPDATE_BASIC;
758: am->regswitch = TAO_ADMM_REGULARIZER_SOFT_THRESH;
759: am->tol = PETSC_SMALL;
760: am->const_norm = 0;
761: am->resnorm = 0;
762: am->dualres = 0;
763: am->ops->regobjgrad = NULL;
764: am->ops->reghess = NULL;
765: am->gamma = 1;
766: am->regobjgradP = NULL;
767: am->reghessP = NULL;
768: am->gatol_admm = 1e-8;
769: am->catol_admm = 0;
770: am->Hxchange = PETSC_TRUE;
771: am->Hzchange = PETSC_TRUE;
772: am->Hzbool = PETSC_TRUE;
773: am->Hxbool = PETSC_TRUE;
775: TaoCreate(PetscObjectComm((PetscObject)tao),&am->subsolverX);
776: TaoSetOptionsPrefix(am->subsolverX,"misfit_");
777: PetscObjectIncrementTabLevel((PetscObject)am->subsolverX,(PetscObject)tao,1);
778: TaoCreate(PetscObjectComm((PetscObject)tao),&am->subsolverZ);
779: TaoSetOptionsPrefix(am->subsolverZ,"reg_");
780: PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ,(PetscObject)tao,1);
782: TaoSetType(am->subsolverX,TAONLS);
783: TaoSetType(am->subsolverZ,TAONLS);
784: PetscObjectCompose((PetscObject)am->subsolverX,"TaoGetADMMParentTao_ADMM", (PetscObject) tao);
785: PetscObjectCompose((PetscObject)am->subsolverZ,"TaoGetADMMParentTao_ADMM", (PetscObject) tao);
786: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetRegularizerType_C",TaoADMMSetRegularizerType_ADMM);
787: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetRegularizerType_C",TaoADMMGetRegularizerType_ADMM);
788: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMSetUpdateType_C",TaoADMMSetUpdateType_ADMM);
789: PetscObjectComposeFunction((PetscObject)tao,"TaoADMMGetUpdateType_C",TaoADMMGetUpdateType_ADMM);
790: return 0;
791: }
793: /*@
794: TaoADMMSetMisfitHessianChangeStatus - Set a boolean that determines whether the Hessian matrix of the misfit subsolver changes with respect to the input vector.
796: Collective on Tao
798: Input Parameters:
799: + tao - the Tao solver context.
800: - b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.
802: Level: advanced
804: .seealso: TAOADMM
806: @*/
807: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
808: {
809: TAO_ADMM *am = (TAO_ADMM*)tao->data;
811: am->Hxchange = b;
812: return 0;
813: }
815: /*@
816: TaoADMMSetRegHessianChangeStatus - Set a boolean that determines whether the Hessian matrix of the regularization subsolver changes with respect to the input vector.
818: Collective on Tao
820: Input Parameters:
821: + tao - the Tao solver context
822: - b - the Hessian matrix change status boolean, PETSC_FALSE when the Hessian matrix does not change, PETSC_TRUE otherwise.
824: Level: advanced
826: .seealso: TAOADMM
828: @*/
829: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
830: {
831: TAO_ADMM *am = (TAO_ADMM*)tao->data;
833: am->Hzchange = b;
834: return 0;
835: }
837: /*@
838: TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value
840: Collective on Tao
842: Input Parameters:
843: + tao - the Tao solver context
844: - mu - spectral penalty
846: Level: advanced
848: .seealso: TaoADMMSetMinimumSpectralPenalty(), TAOADMM
849: @*/
850: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
851: {
852: TAO_ADMM *am = (TAO_ADMM*)tao->data;
854: am->mu = mu;
855: return 0;
856: }
858: /*@
859: TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value
861: Collective on Tao
863: Input Parameter:
864: . tao - the Tao solver context
866: Output Parameter:
867: . mu - spectral penalty
869: Level: advanced
871: .seealso: TaoADMMSetMinimumSpectralPenalty(), TaoADMMSetSpectralPenalty(), TAOADMM
872: @*/
873: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
874: {
875: TAO_ADMM *am = (TAO_ADMM*)tao->data;
879: *mu = am->mu;
880: return 0;
881: }
883: /*@
884: TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside ADMM
886: Collective on Tao
888: Input Parameter:
889: . tao - the Tao solver context
891: Output Parameter:
892: . misfit - the Tao subsolver context
894: Level: advanced
896: .seealso: TAOADMM
898: @*/
899: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
900: {
901: TAO_ADMM *am = (TAO_ADMM*)tao->data;
903: *misfit = am->subsolverX;
904: return 0;
905: }
907: /*@
908: TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside ADMM
910: Collective on Tao
912: Input Parameter:
913: . tao - the Tao solver context
915: Output Parameter:
916: . reg - the Tao subsolver context
918: Level: advanced
920: .seealso: TAOADMM
922: @*/
923: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
924: {
925: TAO_ADMM *am = (TAO_ADMM*)tao->data;
927: *reg = am->subsolverZ;
928: return 0;
929: }
931: /*@
932: TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for ADMM
934: Collective on Tao
936: Input Parameters:
937: + tao - the Tao solver context
938: - c - RHS vector
940: Level: advanced
942: .seealso: TAOADMM
944: @*/
945: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
946: {
947: TAO_ADMM *am = (TAO_ADMM*)tao->data;
949: am->constraint = c;
950: return 0;
951: }
953: /*@
954: TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty
956: Collective on Tao
958: Input Parameters:
959: + tao - the Tao solver context
960: - mu - minimum spectral penalty value
962: Level: advanced
964: .seealso: TaoADMMGetSpectralPenalty(), TAOADMM
965: @*/
966: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
967: {
968: TAO_ADMM *am = (TAO_ADMM*)tao->data;
970: am->mumin= mu;
971: return 0;
972: }
974: /*@
975: TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case
977: Collective on Tao
979: Input Parameters:
980: + tao - the Tao solver context
981: - lambda - L1-norm regularizer coefficient
983: Level: advanced
985: .seealso: TaoADMMSetMisfitConstraintJacobian(), TaoADMMSetRegularizerConstraintJacobian(), TAOADMM
987: @*/
988: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
989: {
990: TAO_ADMM *am = (TAO_ADMM*)tao->data;
992: am->lambda = lambda;
993: return 0;
994: }
996: /*@C
997: TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the ADMM algorithm. Matrix A constrains the x variable.
999: Collective on Tao
1001: Input Parameters:
1002: + tao - the Tao solver context
1003: . J - user-created misfit constraint Jacobian matrix
1004: . Jpre - user-created misfit constraint Jacobian preconditioner matrix
1005: . func - function pointer for the misfit constraint Jacobian update function
1006: - ctx - user context for the misfit constraint Jacobian
1008: Level: advanced
1010: .seealso: TaoADMMSetRegularizerCoefficient(), TaoADMMSetRegularizerConstraintJacobian(), TAOADMM
1012: @*/
1013: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1014: {
1015: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1018: if (J) {
1021: }
1022: if (Jpre) {
1025: }
1026: if (ctx) am->misfitjacobianP = ctx;
1027: if (func) am->ops->misfitjac = func;
1029: if (J) {
1030: PetscObjectReference((PetscObject)J);
1031: MatDestroy(&am->JA);
1032: am->JA = J;
1033: }
1034: if (Jpre) {
1035: PetscObjectReference((PetscObject)Jpre);
1036: MatDestroy(&am->JApre);
1037: am->JApre = Jpre;
1038: }
1039: return 0;
1040: }
1042: /*@C
1043: TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the ADMM algorithm. Matrix B constrains the z variable.
1045: Collective on Tao
1047: Input Parameters:
1048: + tao - the Tao solver context
1049: . J - user-created regularizer constraint Jacobian matrix
1050: . Jpre - user-created regularizer Jacobian constraint preconditioner matrix
1051: . func - function pointer for the regularizer constraint Jacobian update function
1052: - ctx - user context for the regularizer constraint Jacobian
1054: Level: advanced
1056: .seealso: TaoADMMSetRegularizerCoefficient(), TaoADMMSetMisfitConstraintJacobian(), TAOADMM
1058: @*/
1059: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1060: {
1061: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1064: if (J) {
1067: }
1068: if (Jpre) {
1071: }
1072: if (ctx) am->regjacobianP = ctx;
1073: if (func) am->ops->regjac = func;
1075: if (J) {
1076: PetscObjectReference((PetscObject)J);
1077: MatDestroy(&am->JB);
1078: am->JB = J;
1079: }
1080: if (Jpre) {
1081: PetscObjectReference((PetscObject)Jpre);
1082: MatDestroy(&am->JBpre);
1083: am->JBpre = Jpre;
1084: }
1085: return 0;
1086: }
1088: /*@C
1089: TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function
1091: Collective on tao
1093: Input Parameters:
1094: + tao - the Tao context
1095: . func - function pointer for the misfit value and gradient evaluation
1096: - ctx - user context for the misfit
1098: Level: advanced
1100: .seealso: TAOADMM
1102: @*/
1103: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
1104: {
1105: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1108: am->misfitobjgradP = ctx;
1109: am->ops->misfitobjgrad = func;
1110: return 0;
1111: }
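/* For illustration, a user callback matching the expected signature might look like the sketch
 * below (MyMisfitObjGrad and its body are hypothetical, not part of this file):
 *
 *   PetscErrorCode MyMisfitObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
 *   {
 *     ... evaluate the misfit value into *f and its gradient into g, using the user context ctx ...
 *     return 0;
 *   }
 *
 * registered with TaoADMMSetMisfitObjectiveAndGradientRoutine(tao,MyMisfitObjGrad,&myctx). */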
1113: /*@C
1114: TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1115: function into the algorithm, to be used for subsolverX.
1117: Collective on tao
1119: Input Parameters:
1120: + tao - the Tao context
1121: . H - user-created matrix for the Hessian of the misfit term
1122: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1123: . func - function pointer for the misfit Hessian evaluation
1124: - ctx - user context for the misfit Hessian
1126: Level: advanced
1128: .seealso: TAOADMM
1130: @*/
1131: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1132: {
1133: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1136: if (H) {
1139: }
1140: if (Hpre) {
1143: }
1144: if (ctx) {
1145: am->misfithessP = ctx;
1146: }
1147: if (func) {
1148: am->ops->misfithess = func;
1149: }
1150: if (H) {
1151: PetscObjectReference((PetscObject)H);
1152: MatDestroy(&am->Hx);
1153: am->Hx = H;
1154: }
1155: if (Hpre) {
1156: PetscObjectReference((PetscObject)Hpre);
1157: MatDestroy(&am->Hxpre);
1158: am->Hxpre = Hpre;
1159: }
1160: return 0;
1161: }
1163: /*@C
1164: TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function
1166: Collective on tao
1168: Input Parameters:
1169: + tao - the Tao context
1170: . func - function pointer for the regularizer value and gradient evaluation
1171: - ctx - user context for the regularizer
1173: Level: advanced
1175: .seealso: TAOADMM
1177: @*/
1178: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
1179: {
1180: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1183: am->regobjgradP = ctx;
1184: am->ops->regobjgrad = func;
1185: return 0;
1186: }
1188: /*@C
1189: TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1190: function, to be used for subsolverZ.
1192: Collective on tao
1194: Input Parameters:
1195: + tao - the Tao context
1196: . H - user-created matrix for the Hessian of the regularization term
1197: . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1198: . func - function pointer for the regularizer Hessian evaluation
1199: - ctx - user context for the regularizer Hessian
1201: Level: advanced
1203: .seealso: TAOADMM
1205: @*/
1206: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void*), void *ctx)
1207: {
1208: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1211: if (H) {
1214: }
1215: if (Hpre) {
1218: }
1219: if (ctx) {
1220: am->reghessP = ctx;
1221: }
1222: if (func) {
1223: am->ops->reghess = func;
1224: }
1225: if (H) {
1226: PetscObjectReference((PetscObject)H);
1227: MatDestroy(&am->Hz);
1228: am->Hz = H;
1229: }
1230: if (Hpre) {
1231: PetscObjectReference((PetscObject)Hpre);
1232: MatDestroy(&am->Hzpre);
1233: am->Hzpre = Hpre;
1234: }
1235: return 0;
1236: }
1238: /*@
1239: TaoGetADMMParentTao - Gets a pointer to the parent ADMM Tao, used by the inner subsolver.
1241: Collective on tao
1243: Input Parameter:
1244: . tao - the Tao context
1246: Output Parameter:
1247: . admm_tao - the parent Tao context
1249: Level: advanced
1251: .seealso: TAOADMM
1253: @*/
1254: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1255: {
1257: PetscObjectQuery((PetscObject)tao,"TaoGetADMMParentTao_ADMM", (PetscObject*) admm_tao);
1258: return 0;
1259: }
1261: /*@
1262: TaoADMMGetDualVector - Returns the dual vector associated with the current TAOADMM state
1264: Not Collective
1266: Input Parameter:
1267: . tao - the Tao context
1269: Output Parameter:
1270: . Y - the dual vector
1272: Level: intermediate
1274: .seealso: TAOADMM
1276: @*/
1277: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1278: {
1279: TAO_ADMM *am = (TAO_ADMM*)tao->data;
1282: *Y = am->y;
1283: return 0;
1284: }
1286: /*@
1287: TaoADMMSetRegularizerType - Set the regularizer type for the ADMM routine
1289: Not Collective
1291: Input Parameters:
1292: + tao - the Tao context
1293: - type - regularizer type
1295: Options Database:
1296: . -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer
1298: Level: intermediate
1300: .seealso: TaoADMMGetRegularizerType(), TaoADMMRegularizerType, TAOADMM
1301: @*/
1302: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1303: {
1306: PetscTryMethod(tao,"TaoADMMSetRegularizerType_C",(Tao,TaoADMMRegularizerType),(tao,type));
1307: return 0;
1308: }
1310: /*@
1311: TaoADMMGetRegularizerType - Gets the type of regularizer routine for ADMM
1313: Not Collective
1315: Input Parameter:
1316: . tao - the Tao context
1318: Output Parameter:
1319: . type - the type of regularizer
1321: Level: intermediate
1323: .seealso: TaoADMMSetRegularizerType(), TaoADMMRegularizerType, TAOADMM
1324: @*/
1325: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1326: {
1328: PetscUseMethod(tao,"TaoADMMGetRegularizerType_C",(Tao,TaoADMMRegularizerType*),(tao,type));
1329: return 0;
1330: }
1332: /*@
1333: TaoADMMSetUpdateType - Set the spectral penalty update type for the ADMM routine
1335: Not Collective
1337: Input Parameters:
1338: + tao - the Tao context
1339: - type - spectral parameter update type
1341: Level: intermediate
1343: .seealso: TaoADMMGetUpdateType(), TaoADMMUpdateType, TAOADMM
1344: @*/
1345: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1346: {
1349: PetscTryMethod(tao,"TaoADMMSetUpdateType_C",(Tao,TaoADMMUpdateType),(tao,type));
1350: return 0;
1351: }
1353: /*@
1354: TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for ADMM
1356: Not Collective
1358: Input Parameter:
1359: . tao - the Tao context
1361: Output Parameter:
1362: . type - the type of spectral penalty update routine
1364: Level: intermediate
1366: .seealso: TaoADMMSetUpdateType(), TaoADMMUpdateType, TAOADMM
1367: @*/
1368: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1369: {
1371: PetscUseMethod(tao,"TaoADMMGetUpdateType_C",(Tao,TaoADMMUpdateType*),(tao,type));
1372: return 0;
1373: }