Actual source code: admm.c
1: #include <../src/tao/constrained/impls/admm/admm.h>
2: #include <petsctao.h>
3: #include <petsc/private/petscimpl.h>
5: /* Updates terminating criteria
6: *
7: * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
8: *
9: * 2. Updates dual residual, d_k
10: *
11: * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^T y|| */
13: static PetscBool cited = PETSC_FALSE;
14: static const char citation[] = "@misc{xu2017adaptive,\n"
15: " title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
16: " author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
17: " year={2017},\n"
18: " eprint={1704.02712},\n"
19: " archivePrefix={arXiv},\n"
20: " primaryClass={cs.CV}\n"
21: "} \n";
23: const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
24: const char *const TaoADMMUpdateTypes[] = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
25: const char *const TaoALMMTypes[] = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};
27: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
28: {
29: TAO_ADMM *am = (TAO_ADMM *)tao->data;
30: PetscReal Axnorm, Bznorm, ATynorm, temp;
31: Vec tempJR, tempL;
32: Tao mis;
34: PetscFunctionBegin;
35: mis = am->subsolverX;
36: tempJR = am->workJacobianRight;
37: tempL = am->workLeft;
38: /* ATy */
39: PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre));
40: PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR));
41: PetscCall(VecNorm(tempJR, NORM_2, &ATynorm));
42: /* dualres = mu * ||AT(Bz-Bzold)||_2 */
43: PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz));
44: PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL));
45: PetscCall(VecNorm(tempL, NORM_2, &am->dualres));
46: am->dualres *= am->mu;
48: /* ||Ax||_2, ||Bz||_2 */
49: PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm));
50: PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm));
52: /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||} *
53: * Set gatol to be gatol_admm * ||A^Ty|| *
54: * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
55: temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
56: PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_DEFAULT));
57: PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_DEFAULT, PETSC_DEFAULT));
58: PetscFunctionReturn(PETSC_SUCCESS);
59: }
61: /* Penalty Update for Adaptive ADMM. */
62: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
63: {
64: TAO_ADMM *am = (TAO_ADMM *)tao->data;
65: PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
66: PetscBool hflag, gflag;
67: Vec tempJR, tempJR2;
69: PetscFunctionBegin;
70: tempJR = am->workJacobianRight;
71: tempJR2 = am->workJacobianRight2;
72: hflag = PETSC_FALSE;
73: gflag = PETSC_FALSE;
74: a_k = -1;
75: b_k = -1;
77: PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax));
78: PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat));
79: PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm));
80: PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm));
81: PetscCall(VecDot(tempJR, tempJR2, &Axyhat));
83: PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz));
84: PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0));
85: PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm));
86: PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm));
87: PetscCall(VecDot(tempJR, tempJR2, &Bzy));
89: if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
90: hflag = PETSC_TRUE;
91: a_sd = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
92: a_mg = Axyhat / PetscSqr(Axdiff_norm); /* alphaMG */
93: a_k = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg;
94: }
95: if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
96: gflag = PETSC_TRUE;
97: b_sd = PetscSqr(ydiff_norm) / Bzy; /* betaSD */
98: b_mg = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
99: b_k = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
100: }
101: am->muold = am->mu;
102: if (gflag && hflag) {
103: am->mu = PetscSqrtReal(a_k * b_k);
104: } else if (hflag) {
105: am->mu = a_k;
106: } else if (gflag) {
107: am->mu = b_k;
108: }
109: if (am->mu > am->muold) am->mu = am->muold;
110: if (am->mu < am->mumin) am->mu = am->mumin;
111: PetscFunctionReturn(PETSC_SUCCESS);
112: }
114: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
115: {
116: TAO_ADMM *am = (TAO_ADMM *)tao->data;
118: PetscFunctionBegin;
119: am->regswitch = type;
120: PetscFunctionReturn(PETSC_SUCCESS);
121: }
123: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
124: {
125: TAO_ADMM *am = (TAO_ADMM *)tao->data;
127: PetscFunctionBegin;
128: *type = am->regswitch;
129: PetscFunctionReturn(PETSC_SUCCESS);
130: }
132: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
133: {
134: TAO_ADMM *am = (TAO_ADMM *)tao->data;
136: PetscFunctionBegin;
137: am->update = type;
138: PetscFunctionReturn(PETSC_SUCCESS);
139: }
141: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
142: {
143: TAO_ADMM *am = (TAO_ADMM *)tao->data;
145: PetscFunctionBegin;
146: *type = am->update;
147: PetscFunctionReturn(PETSC_SUCCESS);
148: }
150: /* This routine updates the Jacobians with the new x and z vectors,
151: * then updates the Ax and Bz vectors, and finally computes the updated residual vector */
152: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
153: {
154: TAO_ADMM *am = (TAO_ADMM *)tao->data;
155: Tao mis, reg;
157: PetscFunctionBegin;
158: mis = am->subsolverX;
159: reg = am->subsolverZ;
160: PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre));
161: PetscCall(MatMult(mis->jacobian_equality, x, Ax));
162: PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre));
163: PetscCall(MatMult(reg->jacobian_equality, z, Bz));
165: PetscCall(VecWAXPY(residual, 1., Bz, Ax));
166: if (am->constraint != NULL) PetscCall(VecAXPY(residual, -1., am->constraint));
167: PetscFunctionReturn(PETSC_SUCCESS);
168: }
170: /* Adds the augmented Lagrangian terms to the given routines.
171: * For subsolverX, the routine needs to be ComputeObjectiveAndGradient.
172: * Separate Objective and Gradient routines are not supported. */
173: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
174: {
175: Tao parent = (Tao)ptr;
176: TAO_ADMM *am = (TAO_ADMM *)parent->data;
177: PetscReal temp, temp2;
178: Vec tempJR;
180: PetscFunctionBegin;
181: tempJR = am->workJacobianRight;
182: PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
183: PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP));
185: am->last_misfit_val = *f;
186: /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
187: PetscCall(VecTDot(am->residual, am->y, &temp));
188: PetscCall(VecTDot(am->residual, am->residual, &temp2));
189: *f += temp + (am->mu / 2) * temp2;
191: /* Gradient: add mu*A^T(Ax+Bz-c) + A^T y */
192: PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR));
193: PetscCall(VecAXPY(g, am->mu, tempJR));
194: PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR));
195: PetscCall(VecAXPY(g, 1., tempJR));
196: PetscFunctionReturn(PETSC_SUCCESS);
197: }
199: /* Adds the augmented Lagrangian terms to the given routines.
200: * For subsolverZ, the routine needs to be ComputeObjectiveAndGradient.
201: * Separate Objective and Gradient routines are not supported. */
202: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
203: {
204: Tao parent = (Tao)ptr;
205: TAO_ADMM *am = (TAO_ADMM *)parent->data;
206: PetscReal temp, temp2;
207: Vec tempJR;
209: PetscFunctionBegin;
210: tempJR = am->workJacobianRight;
211: PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual));
212: PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP));
213: am->last_reg_val = *f;
214: /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
215: PetscCall(VecTDot(am->residual, am->y, &temp));
216: PetscCall(VecTDot(am->residual, am->residual, &temp2));
217: *f += temp + (am->mu / 2) * temp2;
219: /* Gradient: add mu*B^T(Ax+Bz-c) + B^T y */
220: PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR));
221: PetscCall(VecAXPY(g, am->mu, tempJR));
222: PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR));
223: PetscCall(VecAXPY(g, 1., tempJR));
224: PetscFunctionReturn(PETSC_SUCCESS);
225: }
227: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
228: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
229: {
230: TAO_ADMM *am = (TAO_ADMM *)tao->data;
231: PetscInt N;
233: PetscFunctionBegin;
234: PetscCall(VecGetSize(am->workLeft, &N));
235: PetscCall(VecPointwiseMult(am->workLeft, x, x));
236: PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon));
237: PetscCall(VecSqrtAbs(am->workLeft));
238: PetscCall(VecSum(am->workLeft, norm));
239: *norm += N * am->l1epsilon;
240: *norm *= am->lambda;
241: PetscFunctionReturn(PETSC_SUCCESS);
242: }
244: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
245: {
246: TAO_ADMM *am = (TAO_ADMM *)ptr;
248: PetscFunctionBegin;
249: switch (am->update) {
250: case (TAO_ADMM_UPDATE_BASIC):
251: break;
252: case (TAO_ADMM_UPDATE_ADAPTIVE):
253: case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
254: if (H && (am->muold != am->mu)) {
255: if (!Identity) {
256: PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN));
257: } else {
258: PetscCall(MatShift(H, am->mu - am->muold));
259: }
260: }
261: break;
262: }
263: PetscFunctionReturn(PETSC_SUCCESS);
264: }
266: /* Updates the Hessian - adds the second derivative of the augmented Lagrangian
267: * H \gets H + \rho*ATA
268: Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus a no-op
269: For ADAPTIVE and ADAPTIVE_RELAXED,
270: H \gets H + (\rho-\rho_old)*ATA
271: Here, we assume that A is a linear constraint, i.e., it does not change.
272: Thus, for both ADAPTIVE and RELAXED, the ATA matrix is pre-set (except for A=I (null case)); see TaoSetUp_ADMM */
273: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
274: {
275: Tao parent = (Tao)ptr;
276: TAO_ADMM *am = (TAO_ADMM *)parent->data;
278: PetscFunctionBegin;
279: if (am->Hxchange) {
280: /* Case where Hessian gets updated with respect to x vector input. */
281: PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP));
282: PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
283: } else if (am->Hxbool) {
284: /* Hessian doesn't get updated. H(x) = c */
285: /* Update the Lagrangian term only once per TaoSolve call */
286: PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
287: am->Hxbool = PETSC_FALSE;
288: }
289: PetscFunctionReturn(PETSC_SUCCESS);
290: }
292: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
293: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
294: {
295: Tao parent = (Tao)ptr;
296: TAO_ADMM *am = (TAO_ADMM *)parent->data;
298: PetscFunctionBegin;
300: if (am->Hzchange) {
301: /* Case where Hessian gets updated with respect to the z vector input. */
302: PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP));
303: PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
304: } else if (am->Hzbool) {
305: /* Hessian doesn't get updated. H(z) = c */
306: /* Update the Lagrangian term only once per TaoSolve call */
307: PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
308: am->Hzbool = PETSC_FALSE;
309: }
310: PetscFunctionReturn(PETSC_SUCCESS);
311: }
313: /* Shell Matrix routine for the A matrix.
314: * This gets used when the user passes NULL to
315: * TaoADMMSetMisfitConstraintJacobian(tao, NULL, NULL, ...)
316: * Essentially sets A=I */
317: static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out)
318: {
319: PetscFunctionBegin;
320: PetscCall(VecCopy(in, out));
321: PetscFunctionReturn(PETSC_SUCCESS);
322: }
324: /* Shell Matrix routine for B matrix.
325: * This gets used when user puts NULL for
326: * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
327: * Sets B=-I */
328: static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out)
329: {
330: PetscFunctionBegin;
331: PetscCall(VecCopy(in, out));
332: PetscCall(VecScale(out, -1.));
333: PetscFunctionReturn(PETSC_SUCCESS);
334: }
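/* Illustrative usage sketch (an editorial addition, not part of the library source): if the user never
 * supplies constraint Jacobians, the two identity shells above give the classic splitting x - z = 0,
 * i.e. A = I and B = -I. Supplying matrices switches to a general linear constraint Ax + Bz = c.
 * Here tao is an already-created TAOADMM solver; A_user, B_user, and c_user are hypothetical,
 * user-assembled objects.
 *
 *   // Default: no constraint-Jacobian calls -> A = I, B = -I (shell routines above)
 *   // General constraint Ax + Bz = c:
 *   PetscCall(TaoADMMSetMisfitConstraintJacobian(tao, A_user, A_user, NULL, NULL));
 *   PetscCall(TaoADMMSetRegularizerConstraintJacobian(tao, B_user, B_user, NULL, NULL));
 *   PetscCall(TaoADMMSetConstraintVectorRHS(tao, c_user));
 */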
336: /* Solve min f(x) + g(z) s.t. Ax + Bz = c */
337: static PetscErrorCode TaoSolve_ADMM(Tao tao)
338: {
339: TAO_ADMM *am = (TAO_ADMM *)tao->data;
340: PetscInt N;
341: PetscReal reg_func;
342: PetscBool is_reg_shell;
343: Vec tempL;
345: PetscFunctionBegin;
346: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
347: PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first");
348: PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first");
349: if (am->constraint != NULL) PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm));
350: }
351: tempL = am->workLeft;
352: PetscCall(VecGetSize(tempL, &N));
354: if (am->Hx && am->ops->misfithess) PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hxpre, SubHessianUpdate, tao));
356: if (!am->zJI) {
357: /* Currently, B is assumed to be linear (constant), i.e., it does not get updated */
358: PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->BTB)));
359: }
360: if (!am->xJI) {
361: /* Currently, A is assumed to be linear (constant), i.e., it does not get updated */
362: PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->ATA)));
363: }
365: is_reg_shell = PETSC_FALSE;
367: PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell));
369: if (!is_reg_shell) {
370: switch (am->regswitch) {
371: case (TAO_ADMM_REGULARIZER_USER):
372: break;
373: case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
374: /* Soft Threshold. */
375: break;
376: }
377: if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao));
378: if (am->Hz && am->ops->reghess) PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao));
379: }
381: switch (am->update) {
382: case TAO_ADMM_UPDATE_BASIC:
383: if (am->subsolverX->hessian) {
384: /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty,
385: * so add mu*ATA to the Hessian once here; when A is the identity (am->xJI), this reduces to a shift by mu */
386: if (!am->xJI) {
387: PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN));
388: } else {
389: PetscCall(MatShift(am->subsolverX->hessian, am->mu));
390: }
391: }
392: if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
393: if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
394: PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN));
395: } else {
396: PetscCall(MatShift(am->subsolverZ->hessian, am->mu));
397: }
398: }
399: break;
400: case TAO_ADMM_UPDATE_ADAPTIVE:
401: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
402: break;
403: }
405: PetscCall(PetscCitationsRegister(citation, &cited));
406: tao->reason = TAO_CONTINUE_ITERATING;
408: while (tao->reason == TAO_CONTINUE_ITERATING) {
409: PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
410: PetscCall(VecCopy(am->Bz, am->Bzold));
412: /* x update */
413: PetscCall(TaoSolve(am->subsolverX));
414: PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre));
415: PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax));
417: am->Hxbool = PETSC_TRUE;
419: /* z update */
420: switch (am->regswitch) {
421: case TAO_ADMM_REGULARIZER_USER:
422: PetscCall(TaoSolve(am->subsolverZ));
423: break;
424: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
425: /* L1 soft-thresholding assumes the A,B Jacobians are n x n identity matrices */
426: PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax));
427: PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution));
428: break;
429: }
430: am->Hzbool = PETSC_TRUE;
431: /* Returns Ax + Bz - c with updated Ax,Bz vectors */
432: PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
433: /* Dual variable update, y = yold + mu*(Ax+Bz-c) */
434: PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold));
436: /* stopping tolerance update */
437: PetscCall(TaoADMMToleranceUpdate(tao));
439: /* Updating Spectral Penalty */
440: switch (am->update) {
441: case TAO_ADMM_UPDATE_BASIC:
442: am->muold = am->mu;
443: break;
444: case TAO_ADMM_UPDATE_ADAPTIVE:
445: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
446: if (tao->niter == 0) {
447: PetscCall(VecCopy(am->y, am->y0));
448: PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
449: if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
450: PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold));
451: PetscCall(VecCopy(am->Ax, am->Axold));
452: PetscCall(VecCopy(am->Bz, am->Bz0));
453: am->muold = am->mu;
454: } else if (tao->niter % am->T == 1) {
455: /* we computed Bzold in a previous iteration, and we computed Ax above */
456: PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
457: if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
458: PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold));
459: PetscCall(AdaptiveADMMPenaltyUpdate(tao));
460: PetscCall(VecCopy(am->Ax, am->Axold));
461: PetscCall(VecCopy(am->Bz, am->Bz0));
462: PetscCall(VecCopy(am->yhat, am->yhatold));
463: PetscCall(VecCopy(am->y, am->y0));
464: } else {
465: am->muold = am->mu;
466: }
467: break;
468: default:
469: break;
470: }
471: tao->niter++;
473: /* Calculate the original function values. The misfit part was already computed in SubObjGradUpdate during the x update */
474: switch (am->regswitch) {
475: case TAO_ADMM_REGULARIZER_USER:
476: if (is_reg_shell) {
477: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
478: } else {
479: PetscCall((*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP));
480: }
481: break;
482: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
483: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
484: break;
485: }
486: PetscCall(VecCopy(am->y, am->yold));
487: PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
488: PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm));
489: PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its));
491: PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0));
492: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
493: }
494: /* Update vectors */
495: PetscCall(VecCopy(am->subsolverX->solution, tao->solution));
496: PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient));
497: PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL));
498: PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL));
499: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
500: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
501: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
502: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
503: PetscFunctionReturn(PETSC_SUCCESS);
504: }
506: static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject)
507: {
508: TAO_ADMM *am = (TAO_ADMM *)tao->data;
510: PetscFunctionBegin;
511: PetscOptionsHeadBegin(PetscOptionsObject, "ADMM solves a problem of the form f(x) + g(z) subject to x - z = 0. Norm 1 and norm 2 are supported. Different subsolver routines can be selected.");
512: PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL));
513: PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL));
514: PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL));
515: PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL));
516: PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL));
517: PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL));
518: PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL));
519: PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL));
520: PetscOptionsHeadEnd();
521: PetscCall(TaoSetFromOptions(am->subsolverX));
522: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) PetscCall(TaoSetFromOptions(am->subsolverZ));
523: PetscFunctionReturn(PETSC_SUCCESS);
524: }
526: static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer)
527: {
528: TAO_ADMM *am = (TAO_ADMM *)tao->data;
530: PetscFunctionBegin;
531: PetscCall(PetscViewerASCIIPushTab(viewer));
532: PetscCall(TaoView(am->subsolverX, viewer));
533: PetscCall(TaoView(am->subsolverZ, viewer));
534: PetscCall(PetscViewerASCIIPopTab(viewer));
535: PetscFunctionReturn(PETSC_SUCCESS);
536: }
538: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
539: {
540: TAO_ADMM *am = (TAO_ADMM *)tao->data;
541: PetscInt n, N, M;
543: PetscFunctionBegin;
544: PetscCall(VecGetLocalSize(tao->solution, &n));
545: PetscCall(VecGetSize(tao->solution, &N));
546: /* If the Jacobian is given as NULL, it is taken to be the identity matrix with the size of the solution vector */
547: if (!am->JB) {
548: am->zJI = PETSC_TRUE;
549: PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB));
550: PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB));
551: PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB));
552: am->JBpre = am->JB;
553: }
554: if (!am->JA) {
555: am->xJI = PETSC_TRUE;
556: PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA));
557: PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity));
558: PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity));
559: am->JApre = am->JA;
560: }
561: PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax));
562: if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
563: PetscCall(TaoSetSolution(am->subsolverX, tao->solution));
564: if (!am->z) {
565: PetscCall(VecDuplicate(tao->solution, &am->z));
566: PetscCall(VecSet(am->z, 0.0));
567: }
568: PetscCall(TaoSetSolution(am->subsolverZ, am->z));
569: if (!am->workLeft) PetscCall(VecDuplicate(tao->solution, &am->workLeft));
570: if (!am->Axold) PetscCall(VecDuplicate(am->Ax, &am->Axold));
571: if (!am->workJacobianRight) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight));
572: if (!am->workJacobianRight2) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2));
573: if (!am->Bz) PetscCall(VecDuplicate(am->Ax, &am->Bz));
574: if (!am->Bzold) PetscCall(VecDuplicate(am->Ax, &am->Bzold));
575: if (!am->Bz0) PetscCall(VecDuplicate(am->Ax, &am->Bz0));
576: if (!am->y) {
577: PetscCall(VecDuplicate(am->Ax, &am->y));
578: PetscCall(VecSet(am->y, 0.0));
579: }
580: if (!am->yold) {
581: PetscCall(VecDuplicate(am->Ax, &am->yold));
582: PetscCall(VecSet(am->yold, 0.0));
583: }
584: if (!am->y0) {
585: PetscCall(VecDuplicate(am->Ax, &am->y0));
586: PetscCall(VecSet(am->y0, 0.0));
587: }
588: if (!am->yhat) {
589: PetscCall(VecDuplicate(am->Ax, &am->yhat));
590: PetscCall(VecSet(am->yhat, 0.0));
591: }
592: if (!am->yhatold) {
593: PetscCall(VecDuplicate(am->Ax, &am->yhatold));
594: PetscCall(VecSet(am->yhatold, 0.0));
595: }
596: if (!am->residual) {
597: PetscCall(VecDuplicate(am->Ax, &am->residual));
598: PetscCall(VecSet(am->residual, 0.0));
599: }
600: if (!am->constraint) {
601: am->constraint = NULL;
602: } else {
603: PetscCall(VecGetSize(am->constraint, &M));
604: PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!");
605: }
607: /* Save changed tao tolerance for adaptive tolerance */
608: if (tao->gatol_changed) am->gatol_admm = tao->gatol;
609: if (tao->catol_changed) am->catol_admm = tao->catol;
611: /* Update spectral and dual elements for the X subsolver */
612: PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao));
613: PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP));
614: PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP));
615: PetscFunctionReturn(PETSC_SUCCESS);
616: }
618: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
619: {
620: TAO_ADMM *am = (TAO_ADMM *)tao->data;
622: PetscFunctionBegin;
623: PetscCall(VecDestroy(&am->z));
624: PetscCall(VecDestroy(&am->Ax));
625: PetscCall(VecDestroy(&am->Axold));
626: PetscCall(VecDestroy(&am->Bz));
627: PetscCall(VecDestroy(&am->Bzold));
628: PetscCall(VecDestroy(&am->Bz0));
629: PetscCall(VecDestroy(&am->residual));
630: PetscCall(VecDestroy(&am->y));
631: PetscCall(VecDestroy(&am->yold));
632: PetscCall(VecDestroy(&am->y0));
633: PetscCall(VecDestroy(&am->yhat));
634: PetscCall(VecDestroy(&am->yhatold));
635: PetscCall(VecDestroy(&am->workLeft));
636: PetscCall(VecDestroy(&am->workJacobianRight));
637: PetscCall(VecDestroy(&am->workJacobianRight2));
639: PetscCall(MatDestroy(&am->JA));
640: PetscCall(MatDestroy(&am->JB));
641: if (!am->xJI) PetscCall(MatDestroy(&am->JApre));
642: if (!am->zJI) PetscCall(MatDestroy(&am->JBpre));
643: if (am->Hx) {
644: PetscCall(MatDestroy(&am->Hx));
645: PetscCall(MatDestroy(&am->Hxpre));
646: }
647: if (am->Hz) {
648: PetscCall(MatDestroy(&am->Hz));
649: PetscCall(MatDestroy(&am->Hzpre));
650: }
651: PetscCall(MatDestroy(&am->ATA));
652: PetscCall(MatDestroy(&am->BTB));
653: PetscCall(TaoDestroy(&am->subsolverX));
654: PetscCall(TaoDestroy(&am->subsolverZ));
655: am->parent = NULL;
656: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
657: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
658: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
659: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
660: PetscCall(PetscFree(tao->data));
661: PetscFunctionReturn(PETSC_SUCCESS);
662: }
664: /*MC
665: TAOADMM - Alternating direction method of multipliers for solving constrained problems of the form
666: $\min_{x,z} f(x) + g(z)$ s.t. $Ax+Bz=c$.
667: This algorithm employs two sub-Tao solvers, whose types can be specified by the user.
668: The user needs to provide an ObjectiveAndGradient routine and/or a Hessian routine for both subsolvers.
669: Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
670: `TaoADMMSet{Misfit,Regularizer}HessianChangeStatus()`.
671: The second subsolver supports `TAOSHELL`; note that the L1-norm is used for the objective value with the `TAOSHELL` type.
672: A regularizer type can be selected, and currently soft-thresholding is implemented. For the spectral penalty update,
673: basic and adaptive options are currently available.
674: The constraint is Ax+Bz=c, and A and B can be set with `TaoADMMSet{Misfit,Regularizer}ConstraintJacobian()`.
675: c can be set with `TaoADMMSetConstraintVectorRHS()`.
676: The user can also provide a regularizer weight for the second subsolver. {cite}`xu2017adaptive`
678: Options Database Keys:
679: + -tao_admm_regularizer_coefficient - regularizer constant (default 1.e-6)
680: . -tao_admm_spectral_penalty - Constant for Augmented Lagrangian term (default 1.)
681: . -tao_admm_relaxation_parameter - relaxation parameter for Z update (default 1.)
682: . -tao_admm_tolerance_update_factor - ADMM dynamic tolerance update factor (default 1.e-12)
683: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
684: . -tao_admm_minimum_spectral_penalty - Set ADMM minimum spectral penalty (default 0)
685: . -tao_admm_dual_update - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
686: - -tao_admm_regularizer_type - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")
688: Level: beginner
690: .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
691: `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
692: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`, `TaoADMMGetRegularizerCoefficient()`,
693: `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
694: `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
695: `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
696: `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
697: `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
698: M*/
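/* Minimal setup sketch (an editorial illustration, not part of the library source). It assumes a
 * user-written misfit callback UserMisfitObjGrad() with the signature expected by
 * TaoADMMSetMisfitObjectiveAndGradientRoutine(), a user context user_ctx, and a solution vector x;
 * the default soft-threshold regularizer and identity constraint Jacobians (x - z = 0) are used.
 *
 *   Tao tao;
 *   PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
 *   PetscCall(TaoSetType(tao, TAOADMM));
 *   PetscCall(TaoSetSolution(tao, x));
 *   PetscCall(TaoADMMSetMisfitObjectiveAndGradientRoutine(tao, UserMisfitObjGrad, user_ctx));
 *   PetscCall(TaoADMMSetRegularizerType(tao, TAO_ADMM_REGULARIZER_SOFT_THRESH));
 *   PetscCall(TaoADMMSetRegularizerCoefficient(tao, 1.e-3));
 *   PetscCall(TaoSetFromOptions(tao));
 *   PetscCall(TaoSolve(tao));
 *   PetscCall(TaoDestroy(&tao));
 */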
700: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
701: {
702: TAO_ADMM *am;
704: PetscFunctionBegin;
705: PetscCall(PetscNew(&am));
707: tao->ops->destroy = TaoDestroy_ADMM;
708: tao->ops->setup = TaoSetUp_ADMM;
709: tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
710: tao->ops->view = TaoView_ADMM;
711: tao->ops->solve = TaoSolve_ADMM;
713: tao->data = (void *)am;
714: am->l1epsilon = 1e-6;
715: am->lambda = 1e-4;
716: am->mu = 1.;
717: am->muold = 0.;
718: am->mueps = PETSC_MACHINE_EPSILON;
719: am->mumin = 0.;
720: am->orthval = 0.2;
721: am->T = 2;
722: am->parent = tao;
723: am->update = TAO_ADMM_UPDATE_BASIC;
724: am->regswitch = TAO_ADMM_REGULARIZER_SOFT_THRESH;
725: am->tol = PETSC_SMALL;
726: am->const_norm = 0;
727: am->resnorm = 0;
728: am->dualres = 0;
729: am->ops->regobjgrad = NULL;
730: am->ops->reghess = NULL;
731: am->gamma = 1;
732: am->regobjgradP = NULL;
733: am->reghessP = NULL;
734: am->gatol_admm = 1e-8;
735: am->catol_admm = 0;
736: am->Hxchange = PETSC_TRUE;
737: am->Hzchange = PETSC_TRUE;
738: am->Hzbool = PETSC_TRUE;
739: am->Hxbool = PETSC_TRUE;
741: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX));
742: PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_"));
743: PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1));
744: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ));
745: PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_"));
746: PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1));
748: PetscCall(TaoSetType(am->subsolverX, TAONLS));
749: PetscCall(TaoSetType(am->subsolverZ, TAONLS));
750: PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
751: PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
752: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM));
753: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM));
754: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM));
755: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM));
756: PetscFunctionReturn(PETSC_SUCCESS);
757: }
759: /*@
760: TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether Hessian matrix of misfit subsolver changes with respect to input vector.
762: Collective
764: Input Parameters:
765: + tao - the Tao solver context.
766: - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.
768: Level: advanced
770: .seealso: `TAOADMM`
771: @*/
772: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
773: {
774: TAO_ADMM *am = (TAO_ADMM *)tao->data;
776: PetscFunctionBegin;
777: am->Hxchange = b;
778: PetscFunctionReturn(PETSC_SUCCESS);
779: }
781: /*@
782: TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector.
784: Collective
786: Input Parameters:
787: + tao - the `Tao` solver context
788: - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.
790: Level: advanced
792: .seealso: `TAOADMM`
793: @*/
794: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
795: {
796: TAO_ADMM *am = (TAO_ADMM *)tao->data;
798: PetscFunctionBegin;
799: am->Hzchange = b;
800: PetscFunctionReturn(PETSC_SUCCESS);
801: }
803: /*@
804: TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value
806: Collective
808: Input Parameters:
809: + tao - the `Tao` solver context
810: - mu - spectral penalty
812: Level: advanced
814: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
815: @*/
816: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
817: {
818: TAO_ADMM *am = (TAO_ADMM *)tao->data;
820: PetscFunctionBegin;
821: am->mu = mu;
822: PetscFunctionReturn(PETSC_SUCCESS);
823: }
825: /*@
826: TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value
828: Collective
830: Input Parameter:
831: . tao - the `Tao` solver context
833: Output Parameter:
834: . mu - spectral penalty
836: Level: advanced
838: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
839: @*/
840: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
841: {
842: TAO_ADMM *am = (TAO_ADMM *)tao->data;
844: PetscFunctionBegin;
846: PetscAssertPointer(mu, 2);
847: *mu = am->mu;
848: PetscFunctionReturn(PETSC_SUCCESS);
849: }
851: /*@
852: TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside `TAOADMM`
854: Collective
856: Input Parameter:
857: . tao - the `Tao` solver context
859: Output Parameter:
860: . misfit - the `Tao` subsolver context
862: Level: advanced
864: .seealso: `TAOADMM`, `Tao`
865: @*/
866: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
867: {
868: TAO_ADMM *am = (TAO_ADMM *)tao->data;
870: PetscFunctionBegin;
871: *misfit = am->subsolverX;
872: PetscFunctionReturn(PETSC_SUCCESS);
873: }
875: /*@
876: TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside `TAOADMM`
878: Collective
880: Input Parameter:
881: . tao - the `Tao` solver context
883: Output Parameter:
884: . reg - the `Tao` subsolver context
886: Level: advanced
888: .seealso: `TAOADMM`, `Tao`
889: @*/
890: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
891: {
892: TAO_ADMM *am = (TAO_ADMM *)tao->data;
894: PetscFunctionBegin;
895: *reg = am->subsolverZ;
896: PetscFunctionReturn(PETSC_SUCCESS);
897: }
899: /*@
900: TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for `TAOADMM`
902: Collective
904: Input Parameters:
905: + tao - the `Tao` solver context
906: - c - RHS vector
908: Level: advanced
910: .seealso: `TAOADMM`
911: @*/
912: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
913: {
914: TAO_ADMM *am = (TAO_ADMM *)tao->data;
916: PetscFunctionBegin;
917: am->constraint = c;
918: PetscFunctionReturn(PETSC_SUCCESS);
919: }
921: /*@
922: TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty
924: Collective
926: Input Parameters:
927: + tao - the `Tao` solver context
928: - mu - minimum spectral penalty value
930: Level: advanced
932: .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
933: @*/
934: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
935: {
936: TAO_ADMM *am = (TAO_ADMM *)tao->data;
938: PetscFunctionBegin;
939: am->mumin = mu;
940: PetscFunctionReturn(PETSC_SUCCESS);
941: }
943: /*@
944: TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case
946: Collective
948: Input Parameters:
949: + tao - the `Tao` solver context
950: - lambda - L1-norm regularizer coefficient
952: Level: advanced
954: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
955: @*/
956: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
957: {
958: TAO_ADMM *am = (TAO_ADMM *)tao->data;
960: PetscFunctionBegin;
961: am->lambda = lambda;
962: PetscFunctionReturn(PETSC_SUCCESS);
963: }
965: /*@
966: TaoADMMGetRegularizerCoefficient - Get the regularization coefficient lambda for L1 norm regularization case
968: Collective
970: Input Parameter:
971: . tao - the `Tao` solver context
973: Output Parameter:
974: . lambda - L1-norm regularizer coefficient
976: Level: advanced
978: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
979: @*/
980: PetscErrorCode TaoADMMGetRegularizerCoefficient(Tao tao, PetscReal *lambda)
981: {
982: TAO_ADMM *am = (TAO_ADMM *)tao->data;
984: PetscFunctionBegin;
985: *lambda = am->lambda;
986: PetscFunctionReturn(PETSC_SUCCESS);
987: }
989: /*@C
990: TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the `TAOADMM` algorithm. Matrix A constrains the x variable.
992: Collective
994: Input Parameters:
995: + tao - the `Tao` solver context
996: . J - user-created misfit constraint Jacobian matrix
997: . Jpre - user-created misfit constraint Jacobian matrix for constructing the preconditioner, often this is `J`
998: . func - function pointer for the misfit constraint Jacobian update function
999: - ctx - user context for the misfit constraint Jacobian
1001: Level: advanced
1003: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
1004: @*/
1005: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1006: {
1007: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1009: PetscFunctionBegin;
1011: if (J) {
1013: PetscCheckSameComm(tao, 1, J, 2);
1014: }
1015: if (Jpre) {
1017: PetscCheckSameComm(tao, 1, Jpre, 3);
1018: }
1019: if (ctx) am->misfitjacobianP = ctx;
1020: if (func) am->ops->misfitjac = func;
1022: if (J) {
1023: PetscCall(PetscObjectReference((PetscObject)J));
1024: PetscCall(MatDestroy(&am->JA));
1025: am->JA = J;
1026: }
1027: if (Jpre) {
1028: PetscCall(PetscObjectReference((PetscObject)Jpre));
1029: PetscCall(MatDestroy(&am->JApre));
1030: am->JApre = Jpre;
1031: }
1032: PetscFunctionReturn(PETSC_SUCCESS);
1033: }
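/* Callback-signature sketch (an editorial illustration; ConstantMisfitJacobian, A_user, and tao are
 * hypothetical names). Since ADMM assumes the constraint is linear, a constant A needs no update
 * and the callback body can simply return.
 *
 *   static PetscErrorCode ConstantMisfitJacobian(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx)
 *   {
 *     PetscFunctionBegin; // A was assembled once up front; nothing to recompute here
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 *
 *   PetscCall(TaoADMMSetMisfitConstraintJacobian(tao, A_user, A_user, ConstantMisfitJacobian, NULL));
 */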
1035: /*@C
1036: TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the `TAOADMM` algorithm. Matrix B constrains the z variable.
1038: Collective
1040: Input Parameters:
1041: + tao - the `Tao` solver context
1042: . J - user-created regularizer constraint Jacobian matrix
1043: . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J`
1044: . func - function pointer for the regularizer constraint Jacobian update function
1045: - ctx - user context for the regularizer constraint Jacobian
1047: Level: advanced
1049: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
1050: @*/
1051: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1052: {
1053: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1055: PetscFunctionBegin;
1057: if (J) {
1059: PetscCheckSameComm(tao, 1, J, 2);
1060: }
1061: if (Jpre) {
1063: PetscCheckSameComm(tao, 1, Jpre, 3);
1064: }
1065: if (ctx) am->regjacobianP = ctx;
1066: if (func) am->ops->regjac = func;
1068: if (J) {
1069: PetscCall(PetscObjectReference((PetscObject)J));
1070: PetscCall(MatDestroy(&am->JB));
1071: am->JB = J;
1072: }
1073: if (Jpre) {
1074: PetscCall(PetscObjectReference((PetscObject)Jpre));
1075: PetscCall(MatDestroy(&am->JBpre));
1076: am->JBpre = Jpre;
1077: }
1078: PetscFunctionReturn(PETSC_SUCCESS);
1079: }
1081: /*@C
1082: TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function
1084: Collective
1086: Input Parameters:
1087: + tao - the `Tao` context
1088: . func - function pointer for the misfit value and gradient evaluation
1089: - ctx - user context for the misfit
1091: Level: advanced
1093: .seealso: `TAOADMM`
1094: @*/
1095: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1096: {
1097: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1099: PetscFunctionBegin;
1101: am->misfitobjgradP = ctx;
1102: am->ops->misfitobjgrad = func;
1103: PetscFunctionReturn(PETSC_SUCCESS);
1104: }
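/* Callback-signature sketch (an editorial illustration; the quadratic misfit f(x) = 0.5*||x - b||^2
 * and the names UserMisfitObjGrad, b, and tao are assumptions, not library code).
 *
 *   static PetscErrorCode UserMisfitObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
 *   {
 *     Vec         b = (Vec)ctx; // data vector passed as the user context
 *     PetscScalar dot;
 *
 *     PetscFunctionBegin;
 *     PetscCall(VecWAXPY(g, -1.0, b, x)); // g = x - b
 *     PetscCall(VecDot(g, g, &dot));      // ||x - b||^2
 *     *f = 0.5 * PetscRealPart(dot);
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 *
 *   PetscCall(TaoADMMSetMisfitObjectiveAndGradientRoutine(tao, UserMisfitObjGrad, (void *)b));
 */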
1106: /*@C
1107: TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1108: function into the algorithm, to be used for subsolverX.
1110: Collective
1112: Input Parameters:
1113: + tao - the `Tao` context
1114: . H - user-created matrix for the Hessian of the misfit term
1115: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1116: . func - function pointer for the misfit Hessian evaluation
1117: - ctx - user context for the misfit Hessian
1119: Level: advanced
1121: .seealso: `TAOADMM`
1122: @*/
1123: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1124: {
1125: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1127: PetscFunctionBegin;
1129: if (H) {
1131: PetscCheckSameComm(tao, 1, H, 2);
1132: }
1133: if (Hpre) {
1135: PetscCheckSameComm(tao, 1, Hpre, 3);
1136: }
1137: if (ctx) am->misfithessP = ctx;
1138: if (func) am->ops->misfithess = func;
1139: if (H) {
1140: PetscCall(PetscObjectReference((PetscObject)H));
1141: PetscCall(MatDestroy(&am->Hx));
1142: am->Hx = H;
1143: }
1144: if (Hpre) {
1145: PetscCall(PetscObjectReference((PetscObject)Hpre));
1146: PetscCall(MatDestroy(&am->Hxpre));
1147: am->Hxpre = Hpre;
1148: }
1149: PetscFunctionReturn(PETSC_SUCCESS);
1150: }
1152: /*@C
1153: TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function
1155: Collective
1157: Input Parameters:
1158: + tao - the Tao context
1159: . func - function pointer for the regularizer value and gradient evaluation
1160: - ctx - user context for the regularizer
1162: Level: advanced
1164: .seealso: `TAOADMM`
1165: @*/
1166: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1167: {
1168: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1170: PetscFunctionBegin;
1172: am->regobjgradP = ctx;
1173: am->ops->regobjgrad = func;
1174: PetscFunctionReturn(PETSC_SUCCESS);
1175: }
1177: /*@C
1178: TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1179: function, to be used for subsolverZ.
1181: Collective
1183: Input Parameters:
1184: + tao - the `Tao` context
1185: . H - user-created matrix for the Hessian of the regularization term
1186: . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1187: . func - function pointer for the regularizer Hessian evaluation
1188: - ctx - user context for the regularizer Hessian
1190: Level: advanced
1192: .seealso: `TAOADMM`
1193: @*/
1194: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1195: {
1196: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1198: PetscFunctionBegin;
1200: if (H) {
1202: PetscCheckSameComm(tao, 1, H, 2);
1203: }
1204: if (Hpre) {
1206: PetscCheckSameComm(tao, 1, Hpre, 3);
1207: }
1208: if (ctx) am->reghessP = ctx;
1209: if (func) am->ops->reghess = func;
1210: if (H) {
1211: PetscCall(PetscObjectReference((PetscObject)H));
1212: PetscCall(MatDestroy(&am->Hz));
1213: am->Hz = H;
1214: }
1215: if (Hpre) {
1216: PetscCall(PetscObjectReference((PetscObject)Hpre));
1217: PetscCall(MatDestroy(&am->Hzpre));
1218: am->Hzpre = Hpre;
1219: }
1220: PetscFunctionReturn(PETSC_SUCCESS);
1221: }
1223: /*@
1224: TaoGetADMMParentTao - Gets pointer to parent `TAOADMM`, used by inner subsolver.
1226: Collective
1228: Input Parameter:
1229: . tao - the `Tao` context
1231: Output Parameter:
1232: . admm_tao - the parent `Tao` context
1234: Level: advanced
1236: .seealso: `TAOADMM`
1237: @*/
1238: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1239: {
1240: PetscFunctionBegin;
1242: PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
1243: PetscFunctionReturn(PETSC_SUCCESS);
1244: }
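/* Usage sketch (an editorial illustration; MyInnerObjGrad is a hypothetical routine registered on a
 * subsolver). An inner-subsolver callback can reach back to the parent TAOADMM, e.g. to read the
 * current spectral penalty.
 *
 *   static PetscErrorCode MyInnerObjGrad(Tao subsolver, Vec x, PetscReal *f, Vec g, void *ctx)
 *   {
 *     Tao       admm;
 *     PetscReal mu;
 *
 *     PetscFunctionBegin;
 *     PetscCall(TaoGetADMMParentTao(subsolver, &admm)); // parent TAOADMM composed on the subsolver
 *     PetscCall(TaoADMMGetSpectralPenalty(admm, &mu));  // parent's current spectral penalty
 *     // evaluate the objective *f and gradient g here; mu can enter penalty-dependent terms
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 */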
1246: /*@
1247: TaoADMMGetDualVector - Returns the dual vector associated with the current `TAOADMM` state
1249: Not Collective
1251: Input Parameter:
1252: . tao - the `Tao` context
1254: Output Parameter:
1255: . Y - the dual vector
1257: Level: intermediate
1259: .seealso: `TAOADMM`
1260: @*/
1261: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1262: {
1263: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1265: PetscFunctionBegin;
1267: *Y = am->y;
1268: PetscFunctionReturn(PETSC_SUCCESS);
1269: }
1271: /*@
1272: TaoADMMSetRegularizerType - Set regularizer type for `TAOADMM` routine
1274: Not Collective
1276: Input Parameters:
1277: + tao - the `Tao` context
1278: - type - regularizer type
1280: Options Database Key:
1281: . -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer
1283: Level: intermediate
1285: .seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1286: @*/
1287: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1288: {
1289: PetscFunctionBegin;
1292: PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
1293: PetscFunctionReturn(PETSC_SUCCESS);
1294: }
1296: /*@
1297: TaoADMMGetRegularizerType - Gets the type of regularizer routine for `TAOADMM`
1299: Not Collective
1301: Input Parameter:
1302: . tao - the `Tao` context
1304: Output Parameter:
1305: . type - the type of regularizer
1307: Level: intermediate
1309: .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1310: @*/
1311: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1312: {
1313: PetscFunctionBegin;
1315: PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1316: PetscFunctionReturn(PETSC_SUCCESS);
1317: }
1319: /*@
1320: TaoADMMSetUpdateType - Set update routine for `TAOADMM` routine
1322: Not Collective
1324: Input Parameters:
1325: + tao - the `Tao` context
1326: - type - spectral parameter update type
1328: Level: intermediate
1330: .seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1331: @*/
1332: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1333: {
1334: PetscFunctionBegin;
1337: PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
1338: PetscFunctionReturn(PETSC_SUCCESS);
1339: }
1341: /*@
1342: TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for `TAOADMM`
1344: Not Collective
1346: Input Parameter:
1347: . tao - the `Tao` context
1349: Output Parameter:
1350: . type - the type of spectral penalty update routine
1352: Level: intermediate
1354: .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1355: @*/
1356: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1357: {
1358: PetscFunctionBegin;
1360: PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1361: PetscFunctionReturn(PETSC_SUCCESS);
1362: }