Actual source code: admm.c
1: #include <../src/tao/constrained/impls/admm/admm.h>
2: #include <petsctao.h>
3: #include <petsc/private/petscimpl.h>
5: /* Updates termination criteria
6: *
7: * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
8: *
9: * 2. Updates dual residual, d_k
10: *
11: * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty|| */
13: static PetscBool cited = PETSC_FALSE;
14: static const char citation[] = "@misc{xu2017adaptive,\n"
15: " title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
16: " author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
17: " year={2017},\n"
18: " eprint={1704.02712},\n"
19: " archivePrefix={arXiv},\n"
20: " primaryClass={cs.CV}\n"
21: "} \n";
23: const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
24: const char *const TaoADMMUpdateTypes[] = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
25: const char *const TaoALMMTypes[] = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};
27: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
28: {
29: TAO_ADMM *am = (TAO_ADMM *)tao->data;
30: PetscReal Axnorm, Bznorm, ATynorm, temp;
31: Vec tempJR, tempL;
32: Tao mis;
34: PetscFunctionBegin;
35: mis = am->subsolverX;
36: tempJR = am->workJacobianRight;
37: tempL = am->workLeft;
38: /* ATy */
39: PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre));
40: PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR));
41: PetscCall(VecNorm(tempJR, NORM_2, &ATynorm));
42: /* dualres = mu * ||AT(Bz-Bzold)||_2 */
43: PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz));
44: PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL));
45: PetscCall(VecNorm(tempL, NORM_2, &am->dualres));
46: am->dualres *= am->mu;
48: /* ||Ax||_2, ||Bz||_2 */
49: PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm));
50: PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm));
52: /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||} *
53: * Set gatol to be gatol_admm * ||A^Ty|| *
54: * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */
55: temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
56: PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_DEFAULT));
57: PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_DEFAULT, PETSC_DEFAULT));
58: PetscFunctionReturn(PETSC_SUCCESS);
59: }
61: /* Penalty update for adaptive ADMM. */
62: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
63: {
64: TAO_ADMM *am = (TAO_ADMM *)tao->data;
65: PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
66: PetscBool hflag, gflag;
67: Vec tempJR, tempJR2;
69: PetscFunctionBegin;
70: tempJR = am->workJacobianRight;
71: tempJR2 = am->workJacobianRight2;
72: hflag = PETSC_FALSE;
73: gflag = PETSC_FALSE;
74: a_k = -1;
75: b_k = -1;
77: PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax));
78: PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat));
79: PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm));
80: PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm));
81: PetscCall(VecDot(tempJR, tempJR2, &Axyhat));
83: PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz));
84: PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0));
85: PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm));
86: PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm));
87: PetscCall(VecDot(tempJR, tempJR2, &Bzy));
89: if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
90: hflag = PETSC_TRUE;
91: a_sd = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
92: a_mg = Axyhat / PetscSqr(Axdiff_norm); /* alphaMG */
93: a_k = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg;
94: }
95: if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
96: gflag = PETSC_TRUE;
97: b_sd = PetscSqr(ydiff_norm) / Bzy; /* betaSD */
98: b_mg = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
99: b_k = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
100: }
101: am->muold = am->mu;
102: if (gflag && hflag) {
103: am->mu = PetscSqrtReal(a_k * b_k);
104: } else if (hflag) {
105: am->mu = a_k;
106: } else if (gflag) {
107: am->mu = b_k;
108: }
109: if (am->mu > am->muold) am->mu = am->muold;
110: if (am->mu < am->mumin) am->mu = am->mumin;
111: PetscFunctionReturn(PETSC_SUCCESS);
112: }
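/* For reference, the update above restates the spectral penalty rule of the cited paper
 * (a summary of the code, not additional logic): with DAx = Ax - Ax_old and Dyhat = yhat - yhat_old,
 *
 *   alpha_SD = ||Dyhat||^2 / <DAx, Dyhat>,   alpha_MG = <DAx, Dyhat> / ||DAx||^2,
 *   alpha_k  = alpha_MG                   if 2*alpha_MG > alpha_SD,
 *              alpha_SD - alpha_MG/2      otherwise,
 *
 * and analogously beta_k from DBz = Bz - Bz0 and Dy = y0 - y. Each estimate is used only if its
 * correlation safeguard passes (e.g. <DAx, Dyhat> > orthval*||DAx||*||Dyhat|| + mueps); the new
 * penalty is sqrt(alpha_k*beta_k) if both pass, or the single passing estimate, and is then
 * clipped so that it never increases and never falls below mumin. */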
114: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
115: {
116: TAO_ADMM *am = (TAO_ADMM *)tao->data;
118: PetscFunctionBegin;
119: am->regswitch = type;
120: PetscFunctionReturn(PETSC_SUCCESS);
121: }
123: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
124: {
125: TAO_ADMM *am = (TAO_ADMM *)tao->data;
127: PetscFunctionBegin;
128: *type = am->regswitch;
129: PetscFunctionReturn(PETSC_SUCCESS);
130: }
132: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
133: {
134: TAO_ADMM *am = (TAO_ADMM *)tao->data;
136: PetscFunctionBegin;
137: am->update = type;
138: PetscFunctionReturn(PETSC_SUCCESS);
139: }
141: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
142: {
143: TAO_ADMM *am = (TAO_ADMM *)tao->data;
145: PetscFunctionBegin;
146: *type = am->update;
147: PetscFunctionReturn(PETSC_SUCCESS);
148: }
150: /* This routine updates Jacobians with new x,z vectors,
151: * and then updates Ax and Bz vectors, then computes updated residual vector*/
152: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
153: {
154: TAO_ADMM *am = (TAO_ADMM *)tao->data;
155: Tao mis, reg;
157: PetscFunctionBegin;
158: mis = am->subsolverX;
159: reg = am->subsolverZ;
160: PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre));
161: PetscCall(MatMult(mis->jacobian_equality, x, Ax));
162: PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre));
163: PetscCall(MatMult(reg->jacobian_equality, z, Bz));
165: PetscCall(VecWAXPY(residual, 1., Bz, Ax));
166: if (am->constraint != NULL) PetscCall(VecAXPY(residual, -1., am->constraint));
167: PetscFunctionReturn(PETSC_SUCCESS);
168: }
170: /* Updates the Augmented Lagrangian with the given routines.
171: * For subsolverX, the routine needs to be ComputeObjectiveAndGradient;
172: * separate Objective and Gradient routines are not supported. */
173: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
174: {
175: Tao parent = (Tao)ptr;
176: TAO_ADMM *am = (TAO_ADMM *)parent->data;
177: PetscReal temp, temp2;
178: Vec tempJR;
180: PetscFunctionBegin;
181: tempJR = am->workJacobianRight;
182: PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
183: PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP));
185: am->last_misfit_val = *f;
186: /* Objective Add + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
187: PetscCall(VecTDot(am->residual, am->y, &temp));
188: PetscCall(VecTDot(am->residual, am->residual, &temp2));
189: *f += temp + (am->mu / 2) * temp2;
191: /* Gradient. Add mu*A^T(Ax+Bz-c) + A^T*y */
192: PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR));
193: PetscCall(VecAXPY(g, am->mu, tempJR));
194: PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR));
195: PetscCall(VecAXPY(g, 1., tempJR));
196: PetscFunctionReturn(PETSC_SUCCESS);
197: }
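/* In equation form (restating the comments above), the wrappers SubObjGradUpdate() and
 * RegObjGradUpdate() evaluate one block of the augmented Lagrangian
 *
 *   L_mu(x,z;y) = f(x) + g(z) + y^T (Ax + Bz - c) + (mu/2) ||Ax + Bz - c||_2^2,
 *
 * adding the coupling terms to that block's own objective and gradient; e.g. for the x block
 * the gradient assembled above is grad f(x) + A^T y + mu * A^T (Ax + Bz - c). Terms depending
 * only on the other block are constant for that subproblem and are omitted. */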
199: /* Updates the Augmented Lagrangian with the given routines.
200: * For subsolverZ, the routine needs to be ComputeObjectiveAndGradient;
201: * separate Objective and Gradient routines are not supported. */
202: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
203: {
204: Tao parent = (Tao)ptr;
205: TAO_ADMM *am = (TAO_ADMM *)parent->data;
206: PetscReal temp, temp2;
207: Vec tempJR;
209: PetscFunctionBegin;
210: tempJR = am->workJacobianRight;
211: PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual));
212: PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP));
213: am->last_reg_val = *f;
214: /* Objective Add + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
215: PetscCall(VecTDot(am->residual, am->y, &temp));
216: PetscCall(VecTDot(am->residual, am->residual, &temp2));
217: *f += temp + (am->mu / 2) * temp2;
219: /* Gradient. Add mu*B^T(Ax+Bz-c) + B^T*y */
220: PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR));
221: PetscCall(VecAXPY(g, am->mu, tempJR));
222: PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR));
223: PetscCall(VecAXPY(g, 1., tempJR));
224: PetscFunctionReturn(PETSC_SUCCESS);
225: }
227: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
228: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
229: {
230: TAO_ADMM *am = (TAO_ADMM *)tao->data;
231: PetscInt N;
233: PetscFunctionBegin;
234: PetscCall(VecGetSize(am->workLeft, &N));
235: PetscCall(VecPointwiseMult(am->workLeft, x, x));
236: PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon));
237: PetscCall(VecSqrtAbs(am->workLeft));
238: PetscCall(VecSum(am->workLeft, norm));
239: *norm += N * am->l1epsilon;
240: *norm *= am->lambda;
241: PetscFunctionReturn(PETSC_SUCCESS);
242: }
244: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
245: {
246: TAO_ADMM *am = (TAO_ADMM *)ptr;
248: PetscFunctionBegin;
249: switch (am->update) {
250: case (TAO_ADMM_UPDATE_BASIC):
251: break;
252: case (TAO_ADMM_UPDATE_ADAPTIVE):
253: case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
254: if (H && (am->muold != am->mu)) {
255: if (!Identity) {
256: PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN));
257: } else {
258: PetscCall(MatShift(H, am->mu - am->muold));
259: }
260: }
261: break;
262: }
263: PetscFunctionReturn(PETSC_SUCCESS);
264: }
266: /* Updates the Hessian - adds the second derivative of the augmented Lagrangian,
267: * H \gets H + \rho*ATA.
268: * \rho does not change in TAO_ADMM_UPDATE_BASIC - thus this is a no-op there.
269: * For ADAPTIVE and ADAPTIVE_RELAXED,
270: * H \gets H + (\rho-\rho_old)*ATA.
271: * Here, we assume that A is a linear constraint, i.e., it does not change.
272: * Thus, for both ADAPTIVE and ADAPTIVE_RELAXED, the ATA matrix is pre-set (except for A=I, the NULL case); see TaoSetUp_ADMM */
273: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
274: {
275: Tao parent = (Tao)ptr;
276: TAO_ADMM *am = (TAO_ADMM *)parent->data;
278: PetscFunctionBegin;
279: if (am->Hxchange) {
280: /* Case where Hessian gets updated with respect to x vector input. */
281: PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP));
282: PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
283: } else if (am->Hxbool) {
284: /* Hessian doesn't get updated. H(x) = c */
285: /* Update Lagrangian only once per TAO call */
286: PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
287: am->Hxbool = PETSC_FALSE;
288: }
289: PetscFunctionReturn(PETSC_SUCCESS);
290: }
292: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
293: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
294: {
295: Tao parent = (Tao)ptr;
296: TAO_ADMM *am = (TAO_ADMM *)parent->data;
298: PetscFunctionBegin;
299: if (am->Hzchange) {
300: /* Case where Hessian gets updated with respect to the z vector input. */
301: PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP));
302: PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
303: } else if (am->Hzbool) {
304: /* Hessian doesn't get updated. H(x) = c */
305: /* Update Lagrangian only once per TAO call */
306: PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
307: am->Hzbool = PETSC_FALSE;
308: }
309: PetscFunctionReturn(PETSC_SUCCESS);
310: }
312: /* Shell Matrix routine for the A matrix.
313: * This gets used when the user passes NULL for
314: * TaoADMMSetMisfitConstraintJacobian(tao, NULL, NULL, ...)
315: * Essentially sets A=I */
316: static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out)
317: {
318: PetscFunctionBegin;
319: PetscCall(VecCopy(in, out));
320: PetscFunctionReturn(PETSC_SUCCESS);
321: }
323: /* Shell Matrix routine for the B matrix.
324: * This gets used when the user passes NULL for
325: * TaoADMMSetRegularizerConstraintJacobian(tao, NULL, NULL, ...)
326: * Sets B=-I */
327: static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out)
328: {
329: PetscFunctionBegin;
330: PetscCall(VecCopy(in, out));
331: PetscCall(VecScale(out, -1.));
332: PetscFunctionReturn(PETSC_SUCCESS);
333: }
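/* Consequently, when neither constraint Jacobian is supplied, TaoSetUp_ADMM() installs the two
 * shells above and the solver works with the consensus constraint
 *
 *   Ax + Bz = x - z = 0,
 *
 * which is also the setting assumed by the built-in soft-threshold regularizer. */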
335: /* Solve min f(x) + g(z) s.t. Ax + Bz = c */
336: static PetscErrorCode TaoSolve_ADMM(Tao tao)
337: {
338: TAO_ADMM *am = (TAO_ADMM *)tao->data;
339: PetscInt N;
340: PetscReal reg_func;
341: PetscBool is_reg_shell;
342: Vec tempL;
344: PetscFunctionBegin;
345: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
346: PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first");
347: PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first");
348: if (am->constraint != NULL) PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm));
349: }
350: tempL = am->workLeft;
351: PetscCall(VecGetSize(tempL, &N));
353: if (am->Hx && am->ops->misfithess) PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao));
355: if (!am->zJI) {
356: /* Currently, B is assumed to be constant (a linear constraint), i.e., it does not get updated */
357: PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &am->BTB));
358: }
359: if (!am->xJI) {
360: /* Currently, A is assumed to be constant (a linear constraint), i.e., it does not get updated */
361: PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &am->ATA));
362: }
364: is_reg_shell = PETSC_FALSE;
366: PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell));
368: if (!is_reg_shell) {
369: switch (am->regswitch) {
370: case (TAO_ADMM_REGULARIZER_USER):
371: break;
372: case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
373: /* Soft Threshold. */
374: break;
375: }
376: if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao));
377: if (am->Hz && am->ops->reghess) PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao));
378: }
380: switch (am->update) {
381: case TAO_ADMM_UPDATE_BASIC:
382: if (am->subsolverX->hessian) {
383: /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty during the iteration.
384: * Here, when A is given (i.e., !am->xJI), add mu*ATA to the Hessian; otherwise (A=I) shift it by mu */
385: if (!am->xJI) {
386: PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN));
387: } else {
388: PetscCall(MatShift(am->subsolverX->hessian, am->mu));
389: }
390: }
391: if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
392: if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
393: PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN));
394: } else {
395: PetscCall(MatShift(am->subsolverZ->hessian, am->mu));
396: }
397: }
398: break;
399: case TAO_ADMM_UPDATE_ADAPTIVE:
400: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
401: break;
402: }
404: PetscCall(PetscCitationsRegister(citation, &cited));
405: tao->reason = TAO_CONTINUE_ITERATING;
407: while (tao->reason == TAO_CONTINUE_ITERATING) {
408: PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
409: PetscCall(VecCopy(am->Bz, am->Bzold));
411: /* x update */
412: PetscCall(TaoSolve(am->subsolverX));
413: PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre));
414: PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax));
416: am->Hxbool = PETSC_TRUE;
418: /* z update */
419: switch (am->regswitch) {
420: case TAO_ADMM_REGULARIZER_USER:
421: PetscCall(TaoSolve(am->subsolverZ));
422: break;
423: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
424: /* L1 assumes the A,B Jacobians are n x n identity matrices */
425: PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax));
426: PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution));
427: break;
428: }
429: am->Hzbool = PETSC_TRUE;
430: /* Returns Ax + Bz - c with updated Ax,Bz vectors */
431: PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
432: /* Dual variable update, y = y_old + mu*(Ax+Bz-c) */
433: PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold));
435: /* stopping tolerance update */
436: PetscCall(TaoADMMToleranceUpdate(tao));
438: /* Updating Spectral Penalty */
439: switch (am->update) {
440: case TAO_ADMM_UPDATE_BASIC:
441: am->muold = am->mu;
442: break;
443: case TAO_ADMM_UPDATE_ADAPTIVE:
444: case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
445: if (tao->niter == 0) {
446: PetscCall(VecCopy(am->y, am->y0));
447: PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
448: if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
449: PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold));
450: PetscCall(VecCopy(am->Ax, am->Axold));
451: PetscCall(VecCopy(am->Bz, am->Bz0));
452: am->muold = am->mu;
453: } else if (tao->niter % am->T == 1) {
454: /* we computed Bzold in a previous iteration, and we computed Ax above */
455: PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
456: if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
457: PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold));
458: PetscCall(AdaptiveADMMPenaltyUpdate(tao));
459: PetscCall(VecCopy(am->Ax, am->Axold));
460: PetscCall(VecCopy(am->Bz, am->Bz0));
461: PetscCall(VecCopy(am->yhat, am->yhatold));
462: PetscCall(VecCopy(am->y, am->y0));
463: } else {
464: am->muold = am->mu;
465: }
466: break;
467: default:
468: break;
469: }
470: tao->niter++;
472: /* Calculate the original function values. The misfit part (am->last_misfit_val) was recorded during the x update */
473: switch (am->regswitch) {
474: case TAO_ADMM_REGULARIZER_USER:
475: if (is_reg_shell) {
476: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
477: } else {
478: PetscCall((*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP));
479: }
480: break;
481: case TAO_ADMM_REGULARIZER_SOFT_THRESH:
482: PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
483: break;
484: }
485: PetscCall(VecCopy(am->y, am->yold));
486: PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
487: PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm));
488: PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its));
490: PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0));
491: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
492: }
493: /* Update vectors */
494: PetscCall(VecCopy(am->subsolverX->solution, tao->solution));
495: PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient));
496: PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL));
497: PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL));
498: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
499: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
500: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
501: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
502: PetscFunctionReturn(PETSC_SUCCESS);
503: }
505: static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject)
506: {
507: TAO_ADMM *am = (TAO_ADMM *)tao->data;
509: PetscFunctionBegin;
510: PetscOptionsHeadBegin(PetscOptionsObject, "ADMM solver for problems of the form f(x) + g(z) subject to Ax + Bz = c (by default x - z = 0). Norms 1 and 2 are supported. Different subsolver routines can be selected. ");
511: PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL));
512: PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL));
513: PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL));
514: PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL));
515: PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL));
516: PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL));
517: PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL));
518: PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL));
519: PetscOptionsHeadEnd();
520: PetscCall(TaoSetFromOptions(am->subsolverX));
521: if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) PetscCall(TaoSetFromOptions(am->subsolverZ));
522: PetscFunctionReturn(PETSC_SUCCESS);
523: }
525: static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer)
526: {
527: TAO_ADMM *am = (TAO_ADMM *)tao->data;
529: PetscFunctionBegin;
530: PetscCall(PetscViewerASCIIPushTab(viewer));
531: PetscCall(TaoView(am->subsolverX, viewer));
532: PetscCall(TaoView(am->subsolverZ, viewer));
533: PetscCall(PetscViewerASCIIPopTab(viewer));
534: PetscFunctionReturn(PETSC_SUCCESS);
535: }
537: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
538: {
539: TAO_ADMM *am = (TAO_ADMM *)tao->data;
540: PetscInt n, N, M;
542: PetscFunctionBegin;
543: PetscCall(VecGetLocalSize(tao->solution, &n));
544: PetscCall(VecGetSize(tao->solution, &N));
545: /* If a Jacobian is given as NULL, it means the Jacobian is the identity matrix with the size of the solution vector */
546: if (!am->JB) {
547: am->zJI = PETSC_TRUE;
548: PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB));
549: PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB));
550: PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB));
551: am->JBpre = am->JB;
552: }
553: if (!am->JA) {
554: am->xJI = PETSC_TRUE;
555: PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA));
556: PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity));
557: PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity));
558: am->JApre = am->JA;
559: }
560: PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax));
561: if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
562: PetscCall(TaoSetSolution(am->subsolverX, tao->solution));
563: if (!am->z) {
564: PetscCall(VecDuplicate(tao->solution, &am->z));
565: PetscCall(VecSet(am->z, 0.0));
566: }
567: PetscCall(TaoSetSolution(am->subsolverZ, am->z));
568: if (!am->workLeft) PetscCall(VecDuplicate(tao->solution, &am->workLeft));
569: if (!am->Axold) PetscCall(VecDuplicate(am->Ax, &am->Axold));
570: if (!am->workJacobianRight) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight));
571: if (!am->workJacobianRight2) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2));
572: if (!am->Bz) PetscCall(VecDuplicate(am->Ax, &am->Bz));
573: if (!am->Bzold) PetscCall(VecDuplicate(am->Ax, &am->Bzold));
574: if (!am->Bz0) PetscCall(VecDuplicate(am->Ax, &am->Bz0));
575: if (!am->y) {
576: PetscCall(VecDuplicate(am->Ax, &am->y));
577: PetscCall(VecSet(am->y, 0.0));
578: }
579: if (!am->yold) {
580: PetscCall(VecDuplicate(am->Ax, &am->yold));
581: PetscCall(VecSet(am->yold, 0.0));
582: }
583: if (!am->y0) {
584: PetscCall(VecDuplicate(am->Ax, &am->y0));
585: PetscCall(VecSet(am->y0, 0.0));
586: }
587: if (!am->yhat) {
588: PetscCall(VecDuplicate(am->Ax, &am->yhat));
589: PetscCall(VecSet(am->yhat, 0.0));
590: }
591: if (!am->yhatold) {
592: PetscCall(VecDuplicate(am->Ax, &am->yhatold));
593: PetscCall(VecSet(am->yhatold, 0.0));
594: }
595: if (!am->residual) {
596: PetscCall(VecDuplicate(am->Ax, &am->residual));
597: PetscCall(VecSet(am->residual, 0.0));
598: }
599: if (!am->constraint) {
600: am->constraint = NULL;
601: } else {
602: PetscCall(VecGetSize(am->constraint, &M));
603: PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!");
604: }
606: /* Save changed tao tolerance for adaptive tolerance */
607: if (tao->gatol_changed) am->gatol_admm = tao->gatol;
608: if (tao->catol_changed) am->catol_admm = tao->catol;
610: /* Set the augmented Lagrangian objective/gradient on the X subsolver and the constraint Jacobians on both subsolvers */
611: PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao));
612: PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP));
613: PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP));
614: PetscFunctionReturn(PETSC_SUCCESS);
615: }
617: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
618: {
619: TAO_ADMM *am = (TAO_ADMM *)tao->data;
621: PetscFunctionBegin;
622: PetscCall(VecDestroy(&am->z));
623: PetscCall(VecDestroy(&am->Ax));
624: PetscCall(VecDestroy(&am->Axold));
625: PetscCall(VecDestroy(&am->Bz));
626: PetscCall(VecDestroy(&am->Bzold));
627: PetscCall(VecDestroy(&am->Bz0));
628: PetscCall(VecDestroy(&am->residual));
629: PetscCall(VecDestroy(&am->y));
630: PetscCall(VecDestroy(&am->yold));
631: PetscCall(VecDestroy(&am->y0));
632: PetscCall(VecDestroy(&am->yhat));
633: PetscCall(VecDestroy(&am->yhatold));
634: PetscCall(VecDestroy(&am->workLeft));
635: PetscCall(VecDestroy(&am->workJacobianRight));
636: PetscCall(VecDestroy(&am->workJacobianRight2));
638: PetscCall(MatDestroy(&am->JA));
639: PetscCall(MatDestroy(&am->JB));
640: if (!am->xJI) PetscCall(MatDestroy(&am->JApre));
641: if (!am->zJI) PetscCall(MatDestroy(&am->JBpre));
642: if (am->Hx) {
643: PetscCall(MatDestroy(&am->Hx));
644: PetscCall(MatDestroy(&am->Hxpre));
645: }
646: if (am->Hz) {
647: PetscCall(MatDestroy(&am->Hz));
648: PetscCall(MatDestroy(&am->Hzpre));
649: }
650: PetscCall(MatDestroy(&am->ATA));
651: PetscCall(MatDestroy(&am->BTB));
652: PetscCall(TaoDestroy(&am->subsolverX));
653: PetscCall(TaoDestroy(&am->subsolverZ));
654: am->parent = NULL;
655: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
656: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
657: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
658: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
659: PetscCall(PetscFree(tao->data));
660: PetscFunctionReturn(PETSC_SUCCESS);
661: }
663: /*MC
664: TAOADMM - Alternating direction method of multipliers (ADMM) for solving problems with linear constraints
665: of the form $\min_{x,z} f(x) + g(z)$ s.t. $Ax+Bz=c$.
666: The algorithm employs two sub `Tao` solvers, whose types can be specified by the user.
667: The user needs to provide an ObjectiveAndGradient routine, and/or a Hessian routine, for both subsolvers.
668: Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
669: `TaoADMMSet{Misfit,Regularizer}HessianChangeStatus()`.
670: The second subsolver supports `TAOSHELL`. Note that the L1 norm is used for the reported objective value for the `TAOSHELL` type.
671: The regularizer type is selectable; currently a soft-threshold regularizer is implemented. For the spectral penalty update,
672: basic and adaptive options are currently available.
673: The constraint is Ax+Bz=c, where A and B can be set with `TaoADMMSet{Misfit,Regularizer}ConstraintJacobian()`
674: and c can be set with `TaoADMMSetConstraintVectorRHS()`.
675: The user can also provide the regularizer weight for the second subsolver. {cite}`xu2017adaptive`
677: Options Database Keys:
678: + -tao_admm_regularizer_coefficient - regularizer constant (default 1.e-6)
679: . -tao_admm_spectral_penalty - Constant for Augmented Lagrangian term (default 1.)
680: . -tao_admm_relaxation_parameter - relaxation parameter for Z update (default 1.)
681: . -tao_admm_tolerance_update_factor - ADMM dynamic tolerance update factor (default 1.e-12)
682: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
683: . -tao_admm_minimum_spectral_penalty - Set ADMM minimum spectral penalty (default 0)
684: . -tao_admm_dual_update - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
685: - -tao_admm_regularizer_type - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")
687: Level: beginner
689: .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
690: `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
691: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`, `TaoADMMGetRegularizerCoefficient()`,
692: `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
693: `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
694: `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
695: `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
696: `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
697: M*/
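/* A minimal usage sketch (illustrative only; the callback names UserMisfitObjGrad and
 * UserMisfitHessian, the Mat H, the Vec x, and the user context are hypothetical placeholders,
 * and PetscCall() error checking is elided). With no constraint Jacobians set, the defaults
 * A = I and B = -I give the consensus constraint x - z = 0, and the default soft-threshold
 * regularizer yields a lasso-type problem min f(x) + lambda*||z||_1 s.t. x = z:
 *
 *   Tao tao;
 *   TaoCreate(PETSC_COMM_WORLD, &tao);
 *   TaoSetType(tao, TAOADMM);
 *   TaoSetSolution(tao, x);
 *   TaoADMMSetMisfitObjectiveAndGradientRoutine(tao, UserMisfitObjGrad, &user);
 *   TaoADMMSetMisfitHessianRoutine(tao, H, H, UserMisfitHessian, &user);
 *   TaoADMMSetRegularizerCoefficient(tao, 0.1);
 *   TaoADMMSetSpectralPenalty(tao, 1.0);
 *   TaoSetFromOptions(tao);
 *   TaoSolve(tao);
 *   TaoDestroy(&tao);
 *
 * The misfit subsolver is controlled through the "misfit_" options prefix (e.g. -misfit_tao_type)
 * and the regularizer subsolver through the "reg_" prefix. */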
699: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
700: {
701: TAO_ADMM *am;
703: PetscFunctionBegin;
704: PetscCall(PetscNew(&am));
706: tao->ops->destroy = TaoDestroy_ADMM;
707: tao->ops->setup = TaoSetUp_ADMM;
708: tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
709: tao->ops->view = TaoView_ADMM;
710: tao->ops->solve = TaoSolve_ADMM;
712: tao->data = (void *)am;
713: am->l1epsilon = 1e-6;
714: am->lambda = 1e-4;
715: am->mu = 1.;
716: am->muold = 0.;
717: am->mueps = PETSC_MACHINE_EPSILON;
718: am->mumin = 0.;
719: am->orthval = 0.2;
720: am->T = 2;
721: am->parent = tao;
722: am->update = TAO_ADMM_UPDATE_BASIC;
723: am->regswitch = TAO_ADMM_REGULARIZER_SOFT_THRESH;
724: am->tol = PETSC_SMALL;
725: am->const_norm = 0;
726: am->resnorm = 0;
727: am->dualres = 0;
728: am->ops->regobjgrad = NULL;
729: am->ops->reghess = NULL;
730: am->gamma = 1;
731: am->regobjgradP = NULL;
732: am->reghessP = NULL;
733: am->gatol_admm = 1e-8;
734: am->catol_admm = 0;
735: am->Hxchange = PETSC_TRUE;
736: am->Hzchange = PETSC_TRUE;
737: am->Hzbool = PETSC_TRUE;
738: am->Hxbool = PETSC_TRUE;
740: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX));
741: PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_"));
742: PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1));
743: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ));
744: PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_"));
745: PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1));
747: PetscCall(TaoSetType(am->subsolverX, TAONLS));
748: PetscCall(TaoSetType(am->subsolverZ, TAONLS));
749: PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
750: PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
751: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM));
752: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM));
753: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM));
754: PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM));
755: PetscFunctionReturn(PETSC_SUCCESS);
756: }
758: /*@
759: TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether Hessian matrix of misfit subsolver changes with respect to input vector.
761: Collective
763: Input Parameters:
764: + tao - the Tao solver context.
765: - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.
767: Level: advanced
769: .seealso: `TAOADMM`
770: @*/
771: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
772: {
773: TAO_ADMM *am = (TAO_ADMM *)tao->data;
775: PetscFunctionBegin;
776: am->Hxchange = b;
777: PetscFunctionReturn(PETSC_SUCCESS);
778: }
780: /*@
781: TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector.
783: Collective
785: Input Parameters:
786: + tao - the `Tao` solver context
787: - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.
789: Level: advanced
791: .seealso: `TAOADMM`
792: @*/
793: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
794: {
795: TAO_ADMM *am = (TAO_ADMM *)tao->data;
797: PetscFunctionBegin;
798: am->Hzchange = b;
799: PetscFunctionReturn(PETSC_SUCCESS);
800: }
802: /*@
803: TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value
805: Collective
807: Input Parameters:
808: + tao - the `Tao` solver context
809: - mu - spectral penalty
811: Level: advanced
813: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
814: @*/
815: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
816: {
817: TAO_ADMM *am = (TAO_ADMM *)tao->data;
819: PetscFunctionBegin;
820: am->mu = mu;
821: PetscFunctionReturn(PETSC_SUCCESS);
822: }
824: /*@
825: TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value
827: Collective
829: Input Parameter:
830: . tao - the `Tao` solver context
832: Output Parameter:
833: . mu - spectral penalty
835: Level: advanced
837: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
838: @*/
839: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
840: {
841: TAO_ADMM *am = (TAO_ADMM *)tao->data;
843: PetscFunctionBegin;
845: PetscAssertPointer(mu, 2);
846: *mu = am->mu;
847: PetscFunctionReturn(PETSC_SUCCESS);
848: }
850: /*@
851: TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside `TAOADMM`
853: Collective
855: Input Parameter:
856: . tao - the `Tao` solver context
858: Output Parameter:
859: . misfit - the `Tao` subsolver context
861: Level: advanced
863: .seealso: `TAOADMM`, `Tao`
864: @*/
865: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
866: {
867: TAO_ADMM *am = (TAO_ADMM *)tao->data;
869: PetscFunctionBegin;
870: *misfit = am->subsolverX;
871: PetscFunctionReturn(PETSC_SUCCESS);
872: }
874: /*@
875: TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside `TAOADMM`
877: Collective
879: Input Parameter:
880: . tao - the `Tao` solver context
882: Output Parameter:
883: . reg - the `Tao` subsolver context
885: Level: advanced
887: .seealso: `TAOADMM`, `Tao`
888: @*/
889: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
890: {
891: TAO_ADMM *am = (TAO_ADMM *)tao->data;
893: PetscFunctionBegin;
894: *reg = am->subsolverZ;
895: PetscFunctionReturn(PETSC_SUCCESS);
896: }
898: /*@
899: TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for `TAOADMM`
901: Collective
903: Input Parameters:
904: + tao - the `Tao` solver context
905: - c - RHS vector
907: Level: advanced
909: .seealso: `TAOADMM`
910: @*/
911: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
912: {
913: TAO_ADMM *am = (TAO_ADMM *)tao->data;
915: PetscFunctionBegin;
916: am->constraint = c;
917: PetscFunctionReturn(PETSC_SUCCESS);
918: }
920: /*@
921: TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty
923: Collective
925: Input Parameters:
926: + tao - the `Tao` solver context
927: - mu - minimum spectral penalty value
929: Level: advanced
931: .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
932: @*/
933: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
934: {
935: TAO_ADMM *am = (TAO_ADMM *)tao->data;
937: PetscFunctionBegin;
938: am->mumin = mu;
939: PetscFunctionReturn(PETSC_SUCCESS);
940: }
942: /*@
943: TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case
945: Collective
947: Input Parameters:
948: + tao - the `Tao` solver context
949: - lambda - L1-norm regularizer coefficient
951: Level: advanced
953: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
954: @*/
955: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
956: {
957: TAO_ADMM *am = (TAO_ADMM *)tao->data;
959: PetscFunctionBegin;
960: am->lambda = lambda;
961: PetscFunctionReturn(PETSC_SUCCESS);
962: }
964: /*@
965: TaoADMMGetRegularizerCoefficient - Get the regularization coefficient lambda for L1 norm regularization case
967: Collective
969: Input Parameter:
970: . tao - the `Tao` solver context
972: Output Parameter:
973: . lambda - L1-norm regularizer coefficient
975: Level: advanced
977: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
978: @*/
979: PetscErrorCode TaoADMMGetRegularizerCoefficient(Tao tao, PetscReal *lambda)
980: {
981: TAO_ADMM *am = (TAO_ADMM *)tao->data;
983: PetscFunctionBegin;
984: *lambda = am->lambda;
985: PetscFunctionReturn(PETSC_SUCCESS);
986: }
988: /*@C
989: TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the `TAOADMM` algorithm. Matrix A constrains the x variable.
991: Collective
993: Input Parameters:
994: + tao - the Tao solver context
995: . J - user-created misfit constraint Jacobian matrix
996: . Jpre - user-created misfit constraint Jacobian matrix for constructing the preconditioner, often this is `J`
997: . func - function pointer for the misfit constraint Jacobian update function
998: - ctx - user context for the misfit constraint Jacobian
1000: Level: advanced
1002: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
1003: @*/
1004: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1005: {
1006: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1008: PetscFunctionBegin;
1010: if (J) {
1012: PetscCheckSameComm(tao, 1, J, 2);
1013: }
1014: if (Jpre) {
1016: PetscCheckSameComm(tao, 1, Jpre, 3);
1017: }
1018: if (ctx) am->misfitjacobianP = ctx;
1019: if (func) am->ops->misfitjac = func;
1021: if (J) {
1022: PetscCall(PetscObjectReference((PetscObject)J));
1023: PetscCall(MatDestroy(&am->JA));
1024: am->JA = J;
1025: }
1026: if (Jpre) {
1027: PetscCall(PetscObjectReference((PetscObject)Jpre));
1028: PetscCall(MatDestroy(&am->JApre));
1029: am->JApre = Jpre;
1030: }
1031: PetscFunctionReturn(PETSC_SUCCESS);
1032: }
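/* Illustrative sketch (hypothetical names): for a constant misfit constraint matrix A, the
 * update callback can simply leave the provided matrices untouched each time it is called:
 *
 *   static PetscErrorCode ConstantMisfitJacobian(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx)
 *   {
 *     PetscFunctionBegin;
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 *
 *   TaoADMMSetMisfitConstraintJacobian(tao, A, A, ConstantMisfitJacobian, NULL);
 */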
1034: /*@C
1035: TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the `TAOADMM` algorithm. Matrix B constrains the z variable.
1037: Collective
1039: Input Parameters:
1040: + tao - the `Tao` solver context
1041: . J - user-created regularizer constraint Jacobian matrix
1042: . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J`
1043: . func - function pointer for the regularizer constraint Jacobian update function
1044: - ctx - user context for the regularizer Hessian
1046: Level: advanced
1048: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
1049: @*/
1050: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1051: {
1052: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1054: PetscFunctionBegin;
1056: if (J) {
1058: PetscCheckSameComm(tao, 1, J, 2);
1059: }
1060: if (Jpre) {
1062: PetscCheckSameComm(tao, 1, Jpre, 3);
1063: }
1064: if (ctx) am->regjacobianP = ctx;
1065: if (func) am->ops->regjac = func;
1067: if (J) {
1068: PetscCall(PetscObjectReference((PetscObject)J));
1069: PetscCall(MatDestroy(&am->JB));
1070: am->JB = J;
1071: }
1072: if (Jpre) {
1073: PetscCall(PetscObjectReference((PetscObject)Jpre));
1074: PetscCall(MatDestroy(&am->JBpre));
1075: am->JBpre = Jpre;
1076: }
1077: PetscFunctionReturn(PETSC_SUCCESS);
1078: }
1080: /*@C
1081: TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function
1083: Collective
1085: Input Parameters:
1086: + tao - the `Tao` context
1087: . func - function pointer for the misfit value and gradient evaluation
1088: - ctx - user context for the misfit
1090: Level: advanced
1092: .seealso: `TAOADMM`
1093: @*/
1094: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1095: {
1096: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1098: PetscFunctionBegin;
1100: am->misfitobjgradP = ctx;
1101: am->ops->misfitobjgrad = func;
1102: PetscFunctionReturn(PETSC_SUCCESS);
1103: }
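/* Illustrative sketch of a misfit callback with the expected signature, here for the
 * least-squares misfit f(x) = 1/2*||Qx - d||_2^2 (AppCtx, Q, d, and the work vector w are
 * hypothetical user objects):
 *
 *   static PetscErrorCode UserMisfitObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
 *   {
 *     AppCtx *user = (AppCtx *)ptr;
 *
 *     PetscFunctionBegin;
 *     PetscCall(MatMult(user->Q, x, user->w));           // w = Qx
 *     PetscCall(VecAXPY(user->w, -1., user->d));         // w = Qx - d
 *     PetscCall(VecNorm(user->w, NORM_2, f));
 *     *f = 0.5 * (*f) * (*f);                            // f = 1/2*||Qx - d||^2
 *     PetscCall(MatMultTranspose(user->Q, user->w, g));  // g = Q^T (Qx - d)
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 */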
1105: /*@C
1106: TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1107: function into the algorithm, to be used for subsolverX.
1109: Collective
1111: Input Parameters:
1112: + tao - the `Tao` context
1113: . H - user-created matrix for the Hessian of the misfit term
1114: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1115: . func - function pointer for the misfit Hessian evaluation
1116: - ctx - user context for the misfit Hessian
1118: Level: advanced
1120: .seealso: `TAOADMM`
1121: @*/
1122: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1123: {
1124: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1126: PetscFunctionBegin;
1128: if (H) {
1130: PetscCheckSameComm(tao, 1, H, 2);
1131: }
1132: if (Hpre) {
1134: PetscCheckSameComm(tao, 1, Hpre, 3);
1135: }
1136: if (ctx) am->misfithessP = ctx;
1137: if (func) am->ops->misfithess = func;
1138: if (H) {
1139: PetscCall(PetscObjectReference((PetscObject)H));
1140: PetscCall(MatDestroy(&am->Hx));
1141: am->Hx = H;
1142: }
1143: if (Hpre) {
1144: PetscCall(PetscObjectReference((PetscObject)Hpre));
1145: PetscCall(MatDestroy(&am->Hxpre));
1146: am->Hxpre = Hpre;
1147: }
1148: PetscFunctionReturn(PETSC_SUCCESS);
1149: }
1151: /*@C
1152: TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function
1154: Collective
1156: Input Parameters:
1157: + tao - the Tao context
1158: . func - function pointer for the regularizer value and gradient evaluation
1159: - ctx - user context for the regularizer
1161: Level: advanced
1163: .seealso: `TAOADMM`
1164: @*/
1165: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1166: {
1167: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1169: PetscFunctionBegin;
1171: am->regobjgradP = ctx;
1172: am->ops->regobjgrad = func;
1173: PetscFunctionReturn(PETSC_SUCCESS);
1174: }
1176: /*@C
1177: TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1178: function, to be used for subsolverZ.
1180: Collective
1182: Input Parameters:
1183: + tao - the `Tao` context
1184: . H - user-created matrix for the Hessian of the regularization term
1185: . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1186: . func - function pointer for the regularizer Hessian evaluation
1187: - ctx - user context for the regularizer Hessian
1189: Level: advanced
1191: .seealso: `TAOADMM`
1192: @*/
1193: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1194: {
1195: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1197: PetscFunctionBegin;
1199: if (H) {
1201: PetscCheckSameComm(tao, 1, H, 2);
1202: }
1203: if (Hpre) {
1205: PetscCheckSameComm(tao, 1, Hpre, 3);
1206: }
1207: if (ctx) am->reghessP = ctx;
1208: if (func) am->ops->reghess = func;
1209: if (H) {
1210: PetscCall(PetscObjectReference((PetscObject)H));
1211: PetscCall(MatDestroy(&am->Hz));
1212: am->Hz = H;
1213: }
1214: if (Hpre) {
1215: PetscCall(PetscObjectReference((PetscObject)Hpre));
1216: PetscCall(MatDestroy(&am->Hzpre));
1217: am->Hzpre = Hpre;
1218: }
1219: PetscFunctionReturn(PETSC_SUCCESS);
1220: }
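/* Illustrative sketch (hypothetical names): for a Tikhonov-type regularizer
 * g(z) = (alpha/2)*||z||_2^2 the Hessian is the constant matrix alpha*I, so Hz can be
 * preassembled, the callback left trivial, and re-evaluation switched off:
 *
 *   static PetscErrorCode UserRegHessian(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
 *   {
 *     PetscFunctionBegin;
 *     PetscFunctionReturn(PETSC_SUCCESS);  // Hz was preloaded with alpha*I
 *   }
 *
 *   TaoADMMSetRegularizerHessianRoutine(tao, Hz, Hz, UserRegHessian, NULL);
 *   TaoADMMSetRegHessianChangeStatus(tao, PETSC_FALSE);
 */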
1222: /*@
1223: TaoGetADMMParentTao - Gets pointer to parent `TAOADMM`, used by inner subsolver.
1225: Collective
1227: Input Parameter:
1228: . tao - the `Tao` context
1230: Output Parameter:
1231: . admm_tao - the parent `Tao` context
1233: Level: advanced
1235: .seealso: `TAOADMM`
1236: @*/
1237: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1238: {
1239: PetscFunctionBegin;
1241: PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
1242: PetscFunctionReturn(PETSC_SUCCESS);
1243: }
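/* Illustrative sketch: a user callback attached to one of the subsolvers can recover the parent
 * TAOADMM context and query it, e.g. for the current spectral penalty (the callback name and its
 * body are hypothetical):
 *
 *   static PetscErrorCode UserRegObjGrad(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
 *   {
 *     Tao       admm;
 *     PetscReal mu;
 *
 *     PetscFunctionBegin;
 *     PetscCall(TaoGetADMMParentTao(tao, &admm));
 *     PetscCall(TaoADMMGetSpectralPenalty(admm, &mu));
 *     // ... evaluate f and g, possibly scaling with mu ...
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 */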
1245: /*@
1246: TaoADMMGetDualVector - Returns the dual vector associated with the current `TAOADMM` state
1248: Not Collective
1250: Input Parameter:
1251: . tao - the `Tao` context
1253: Output Parameter:
1254: . Y - the dual vector
1256: Level: intermediate
1258: .seealso: `TAOADMM`
1259: @*/
1260: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1261: {
1262: TAO_ADMM *am = (TAO_ADMM *)tao->data;
1264: PetscFunctionBegin;
1266: *Y = am->y;
1267: PetscFunctionReturn(PETSC_SUCCESS);
1268: }
1270: /*@
1271: TaoADMMSetRegularizerType - Set regularizer type for `TAOADMM` routine
1273: Not Collective
1275: Input Parameters:
1276: + tao - the `Tao` context
1277: - type - regularizer type
1279: Options Database Key:
1280: . -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer
1282: Level: intermediate
1284: .seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1285: @*/
1286: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1287: {
1288: PetscFunctionBegin;
1291: PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
1292: PetscFunctionReturn(PETSC_SUCCESS);
1293: }
1295: /*@
1296: TaoADMMGetRegularizerType - Gets the type of regularizer routine for `TAOADMM`
1298: Not Collective
1300: Input Parameter:
1301: . tao - the `Tao` context
1303: Output Parameter:
1304: . type - the type of regularizer
1306: Level: intermediate
1308: .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1309: @*/
1310: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1311: {
1312: PetscFunctionBegin;
1314: PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1315: PetscFunctionReturn(PETSC_SUCCESS);
1316: }
1318: /*@
1319: TaoADMMSetUpdateType - Set update routine for `TAOADMM` routine
1321: Not Collective
1323: Input Parameters:
1324: + tao - the `Tao` context
1325: - type - spectral parameter update type
1327: Level: intermediate
1329: .seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1330: @*/
1331: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1332: {
1333: PetscFunctionBegin;
1336: PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
1337: PetscFunctionReturn(PETSC_SUCCESS);
1338: }
1340: /*@
1341: TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for `TAOADMM`
1343: Not Collective
1345: Input Parameter:
1346: . tao - the `Tao` context
1348: Output Parameter:
1349: . type - the type of spectral penalty update routine
1351: Level: intermediate
1353: .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1354: @*/
1355: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1356: {
1357: PetscFunctionBegin;
1359: PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1360: PetscFunctionReturn(PETSC_SUCCESS);
1361: }