Actual source code: asils.c
#include <../src/tao/complementarity/impls/ssls/ssls.h>
/*
   Context for ASXLS
     -- active-set      - reduced matrices formed
                          - inherit properties of original system
     -- semismooth (S)  - function not differentiable
                        - merit function continuously differentiable
                        - Fischer-Burmeister reformulation of complementarity
                          - Billups composition for two finite bounds
     -- infeasible (I)  - iterates not guaranteed to remain within bounds
     -- feasible (F)    - iterates guaranteed to remain within bounds
     -- linesearch (LS) - Armijo rule on direction

   Many other reformulations are possible, as are other combinations of
   feasible/infeasible iterates and linesearch/trust-region globalization.

   Basic theory
     The Fischer-Burmeister reformulation is semismooth with a continuously
     differentiable merit function, and strongly semismooth if F has
     Lipschitz continuous derivatives.

     Every accumulation point generated by the algorithm is a stationary
     point for the merit function.  Stationary points of the merit function
     are solutions of the complementarity problem if
       a. the stationary point has a BD-regular subdifferential, or
       b. the Schur complement F'/F'_ff is a P_0-matrix, where ff is the
          index set corresponding to the free variables.

     If one of the accumulation points has a BD-regular subdifferential then
       a. the entire sequence converges to this accumulation point at
          a local q-superlinear rate;
       b. if, in addition, the reformulation is strongly semismooth near
          this accumulation point, then the algorithm converges at a
          local q-quadratic rate.

   The theory for the feasible version follows from the feasible descent
   algorithm framework.  See {cite}`billups:algorithms`, {cite}`deluca.facchinei.ea:semismooth`,
   {cite}`ferris.kanzow.ea:feasible`, {cite}`fischer:special`, and {cite}`munson.facchinei.ea:semismooth`.
*/
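
/*
   For reference, a brief sketch of the reformulation used below (see the
   VecFischer implementation for the authoritative definition): the
   Fischer-Burmeister function is

       phi(a, b) = sqrt(a^2 + b^2) - a - b,

   and phi(a, b) = 0 if and only if a >= 0, b >= 0, and a*b = 0.  Applying phi
   componentwise to the bounds and the constraint function F(x) (composing it a
   second time when both bounds are finite, following Billups) yields a
   semismooth system Phi(x) = 0 whose merit function psi(x) = 0.5*||Phi(x)||^2
   is continuously differentiable.
*/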

static PetscErrorCode TaoSetUp_ASILS(Tao tao)
{
  TAO_SSLS *asls = (TAO_SSLS *)tao->data;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(tao->solution, &tao->gradient));
  PetscCall(VecDuplicate(tao->solution, &tao->stepdirection));
  PetscCall(VecDuplicate(tao->solution, &asls->ff));
  PetscCall(VecDuplicate(tao->solution, &asls->dpsi));
  PetscCall(VecDuplicate(tao->solution, &asls->da));
  PetscCall(VecDuplicate(tao->solution, &asls->db));
  PetscCall(VecDuplicate(tao->solution, &asls->t1));
  PetscCall(VecDuplicate(tao->solution, &asls->t2));
  asls->fixed    = NULL;
  asls->free     = NULL;
  asls->J_sub    = NULL;
  asls->Jpre_sub = NULL;
  asls->w        = NULL;
  asls->r1       = NULL;
  asls->r2       = NULL;
  asls->r3       = NULL;
  asls->dxfree   = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
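
/*
   Note on the objective/gradient callback that follows: the merit function is
   psi(x) = 0.5*||Phi(x)||^2, where Phi is the Fischer-Burmeister reformulation
   computed by VecFischer.  With da and db from MatDFischer supplying an element
   D_a + D_b*J of the generalized Jacobian of Phi (D_a = diag(da), D_b = diag(db),
   J = F'(x)), the chain rule gives

       grad psi = (D_a + D_b*J)^T Phi = D_a*Phi + J^T (D_b*Phi),

   which is what Tao_ASLS_FunctionGradient assembles with two pointwise
   multiplications and one transpose matrix-vector product.
*/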

static PetscErrorCode Tao_ASLS_FunctionGradient(TaoLineSearch ls, Vec X, PetscReal *fcn, Vec G, void *ptr)
{
  Tao       tao  = (Tao)ptr;
  TAO_SSLS *asls = (TAO_SSLS *)tao->data;

  PetscFunctionBegin;
  PetscCall(TaoComputeConstraints(tao, X, tao->constraints));
  PetscCall(VecFischer(X, tao->constraints, tao->XL, tao->XU, asls->ff));
  PetscCall(VecNorm(asls->ff, NORM_2, &asls->merit));
  *fcn = 0.5 * asls->merit * asls->merit;

  PetscCall(TaoComputeJacobian(tao, tao->solution, tao->jacobian, tao->jacobian_pre));
  PetscCall(MatDFischer(tao->jacobian, tao->solution, tao->constraints, tao->XL, tao->XU, asls->t1, asls->t2, asls->da, asls->db));
  PetscCall(VecPointwiseMult(asls->t1, asls->ff, asls->db));
  PetscCall(MatMultTranspose(tao->jacobian, asls->t1, G));
  PetscCall(VecPointwiseMult(asls->t1, asls->ff, asls->da));
  PetscCall(VecAXPY(G, 1.0, asls->t1));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoDestroy_ASILS(Tao tao)
{
  TAO_SSLS *ssls = (TAO_SSLS *)tao->data;

  PetscFunctionBegin;
  PetscCall(VecDestroy(&ssls->ff));
  PetscCall(VecDestroy(&ssls->dpsi));
  PetscCall(VecDestroy(&ssls->da));
  PetscCall(VecDestroy(&ssls->db));
  PetscCall(VecDestroy(&ssls->w));
  PetscCall(VecDestroy(&ssls->t1));
  PetscCall(VecDestroy(&ssls->t2));
  PetscCall(VecDestroy(&ssls->r1));
  PetscCall(VecDestroy(&ssls->r2));
  PetscCall(VecDestroy(&ssls->r3));
  PetscCall(VecDestroy(&ssls->dxfree));
  PetscCall(MatDestroy(&ssls->J_sub));
  PetscCall(MatDestroy(&ssls->Jpre_sub));
  PetscCall(ISDestroy(&ssls->fixed));
  PetscCall(ISDestroy(&ssls->free));
  PetscCall(KSPDestroy(&tao->ksp));
  PetscCall(PetscFree(tao->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoSolve_ASILS(Tao tao)
{
  TAO_SSLS                    *asls = (TAO_SSLS *)tao->data;
  PetscReal                    psi, ndpsi, normd, innerd, t = 0;
  PetscInt                     nf;
  TaoLineSearchConvergedReason ls_reason;

  PetscFunctionBegin;
  /* Assume that Setup has been called!
     Set the structure for the Jacobian and create a linear solver. */

  PetscCall(TaoComputeVariableBounds(tao));
  PetscCall(TaoLineSearchSetObjectiveAndGradientRoutine(tao->linesearch, Tao_ASLS_FunctionGradient, tao));
  PetscCall(TaoLineSearchSetObjectiveRoutine(tao->linesearch, Tao_SSLS_Function, tao));

  /* Calculate the merit function value and its gradient (via the Fischer
     function) at the current iterate */
  PetscCall(TaoLineSearchComputeObjectiveAndGradient(tao->linesearch, tao->solution, &psi, asls->dpsi));
  PetscCall(VecNorm(asls->dpsi, NORM_2, &ndpsi));

  tao->reason = TAO_CONTINUE_ITERATING;
  while (1) {
    /* Check the termination criteria */
    PetscCall(PetscInfo(tao, "iter %" PetscInt_FMT ", merit: %g, ||dpsi||: %g\n", tao->niter, (double)asls->merit, (double)ndpsi));
    PetscCall(TaoLogConvergenceHistory(tao, asls->merit, ndpsi, 0.0, tao->ksp_its));
    PetscCall(TaoMonitor(tao, tao->niter, asls->merit, ndpsi, 0.0, t));
    PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
    if (TAO_CONTINUE_ITERATING != tao->reason) break;

    /* Call general purpose update function */
    PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
    tao->niter++;

    /* We are going to solve a linear system of equations.  We need to
       set the tolerances for the solve so that we maintain an asymptotic
       rate of convergence that is superlinear.
       Note: these tolerances are for the reduced system.  We really need
       to make sure that the full system satisfies the full-space conditions.

       This rule gives superlinear asymptotic convergence
         asls->atol = min(0.5, asls->merit*sqrt(asls->merit));
         asls->rtol = 0.0;

       This rule gives quadratic asymptotic convergence
         asls->atol = min(0.5, asls->merit*asls->merit);
         asls->rtol = 0.0;

       Calculate a free and fixed set of variables.  The fixed set consists
       of those variables for which d_b is approximately equal to zero.
       The definition of approximately changes as we approach the solution
       to the problem.

       No one rule is guaranteed to work in all cases.  The following
       definition is based on the norm of the Jacobian matrix.  If the
       norm is large, the tolerance becomes smaller. */
    PetscCall(MatNorm(tao->jacobian, NORM_1, &asls->identifier));
    asls->identifier = PetscMin(asls->merit, 1e-2) / (1 + asls->identifier);

    PetscCall(VecSet(asls->t1, -asls->identifier));
    PetscCall(VecSet(asls->t2, asls->identifier));

    PetscCall(ISDestroy(&asls->fixed));
    PetscCall(ISDestroy(&asls->free));
    PetscCall(VecWhichBetweenOrEqual(asls->t1, asls->db, asls->t2, &asls->fixed));
    PetscCall(ISComplementVec(asls->fixed, asls->t1, &asls->free));
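    /* Here fixed = { i : -identifier <= db_i <= identifier } and free is its
       complement, matching the "d_b approximately zero" rule described above. */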

    PetscCall(ISGetSize(asls->fixed, &nf));
    PetscCall(PetscInfo(tao, "Number of fixed variables: %" PetscInt_FMT "\n", nf));

    /* We now have our partition.  Now calculate the direction in the
       fixed variable space. */
    PetscCall(TaoVecGetSubVec(asls->ff, asls->fixed, tao->subset_type, 0.0, &asls->r1));
    PetscCall(TaoVecGetSubVec(asls->da, asls->fixed, tao->subset_type, 1.0, &asls->r2));
    PetscCall(VecPointwiseDivide(asls->r1, asls->r1, asls->r2));
    PetscCall(VecSet(tao->stepdirection, 0.0));
    PetscCall(VecISAXPY(tao->stepdirection, asls->fixed, 1.0, asls->r1));

    /* Our direction in the Fixed Variable Set is fixed.  Calculate the
       information needed for the step in the Free Variable Set.  To
       do this, we need to know the diagonal perturbation and the
       right hand side. */
    PetscCall(TaoVecGetSubVec(asls->da, asls->free, tao->subset_type, 0.0, &asls->r1));
    PetscCall(TaoVecGetSubVec(asls->ff, asls->free, tao->subset_type, 0.0, &asls->r2));
    PetscCall(TaoVecGetSubVec(asls->db, asls->free, tao->subset_type, 1.0, &asls->r3));
    PetscCall(VecPointwiseDivide(asls->r1, asls->r1, asls->r3));
    PetscCall(VecPointwiseDivide(asls->r2, asls->r2, asls->r3));

    /* r1 is the diagonal perturbation
       r2 is the right hand side
       r3 is no longer needed

       Now need to modify r2 for our direction choice in the fixed
       variable set: calculate t1 = J*d, take the reduced vector
       of t1 and modify r2. */
    PetscCall(MatMult(tao->jacobian, tao->stepdirection, asls->t1));
    PetscCall(TaoVecGetSubVec(asls->t1, asls->free, tao->subset_type, 0.0, &asls->r3));
    PetscCall(VecAXPY(asls->r2, -1.0, asls->r3));
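
    /* The free-variable system assembled next is

         [J_ff + diag(da_f ./ db_f)] d_f = Phi_f ./ db_f - (J d_fixed)_f,

       i.e., the free rows of (D_a + D_b J) d = Phi divided through by db,
       with the already-chosen fixed components of d moved to the right-hand
       side (d here is the negative of the Newton direction). */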

    /* Calculate the reduced problem matrix and the direction */
    if (!asls->w && (tao->subset_type == TAO_SUBSET_MASK || tao->subset_type == TAO_SUBSET_MATRIXFREE)) PetscCall(VecDuplicate(tao->solution, &asls->w));
    PetscCall(TaoMatGetSubMat(tao->jacobian, asls->free, asls->w, tao->subset_type, &asls->J_sub));
    if (tao->jacobian != tao->jacobian_pre) {
      PetscCall(TaoMatGetSubMat(tao->jacobian_pre, asls->free, asls->w, tao->subset_type, &asls->Jpre_sub));
    } else {
      PetscCall(MatDestroy(&asls->Jpre_sub));
      asls->Jpre_sub = asls->J_sub;
      PetscCall(PetscObjectReference((PetscObject)(asls->Jpre_sub)));
    }
    PetscCall(MatDiagonalSet(asls->J_sub, asls->r1, ADD_VALUES));
    PetscCall(TaoVecGetSubVec(tao->stepdirection, asls->free, tao->subset_type, 0.0, &asls->dxfree));
    PetscCall(VecSet(asls->dxfree, 0.0));

    /* Calculate the reduced direction.  (This is really the negative of the
       Newton direction; the rest of the code therefore works with -d.) */
    PetscCall(KSPReset(tao->ksp));
    PetscCall(KSPSetOperators(tao->ksp, asls->J_sub, asls->Jpre_sub));
    PetscCall(KSPSolve(tao->ksp, asls->r2, asls->dxfree));
    PetscCall(KSPGetIterationNumber(tao->ksp, &tao->ksp_its));
    tao->ksp_tot_its += tao->ksp_its;

    /* Add the direction in the free variables back into the real direction. */
    PetscCall(VecISAXPY(tao->stepdirection, asls->free, 1.0, asls->dxfree));

    /* Check that the real direction is a descent direction; if it is not,
       use the negative gradient direction instead. */
    PetscCall(VecNorm(tao->stepdirection, NORM_2, &normd));
    PetscCall(VecDot(tao->stepdirection, asls->dpsi, &innerd));

    if (innerd <= asls->delta * PetscPowReal(normd, asls->rho)) {
      PetscCall(PetscInfo(tao, "Gradient direction: %5.4e.\n", (double)innerd));
      PetscCall(PetscInfo(tao, "Iteration %" PetscInt_FMT ": newton direction not descent\n", tao->niter));
      PetscCall(VecCopy(asls->dpsi, tao->stepdirection));
      PetscCall(VecDot(asls->dpsi, tao->stepdirection, &innerd));
    }

    PetscCall(VecScale(tao->stepdirection, -1.0));
    innerd = -innerd;

    /* We now have a correct descent direction.  Apply a linesearch to
       find the new iterate. */
    PetscCall(TaoLineSearchSetInitialStepLength(tao->linesearch, 1.0));
    PetscCall(TaoLineSearchApply(tao->linesearch, tao->solution, &psi, asls->dpsi, tao->stepdirection, &t, &ls_reason));
    PetscCall(VecNorm(asls->dpsi, NORM_2, &ndpsi));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  TAOASILS - Active-set infeasible linesearch algorithm for solving complementarity constraints

  Options Database Keys:
+ -tao_ssls_delta - descent test fraction
- -tao_ssls_rho - descent test power

  Level: beginner

  Note:
  See {cite}`billups:algorithms`, {cite}`deluca.facchinei.ea:semismooth`,
  {cite}`ferris.kanzow.ea:feasible`, {cite}`fischer:special`, and {cite}`munson.facchinei.ea:semismooth`.

.seealso: `Tao`, `TaoType`, `TAOASFLS`
M*/
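
/*
   A minimal usage sketch (illustrative only, not part of this file).  The
   callback names UserConstraints and UserJacobian, the context pointer user,
   and the vectors/matrix are placeholders chosen for this example; run-time
   options such as -tao_ssls_delta and -tao_ssls_rho are picked up by
   TaoSetFromOptions():

     PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
     PetscCall(TaoSetType(tao, TAOASILS));
     PetscCall(TaoSetSolution(tao, x));
     PetscCall(TaoSetVariableBounds(tao, xl, xu));
     PetscCall(TaoSetConstraintsRoutine(tao, c, UserConstraints, &user));
     PetscCall(TaoSetJacobianRoutine(tao, J, J, UserJacobian, &user));
     PetscCall(TaoSetFromOptions(tao));
     PetscCall(TaoSolve(tao));
     PetscCall(TaoDestroy(&tao));
*/
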
PETSC_EXTERN PetscErrorCode TaoCreate_ASILS(Tao tao)
{
  TAO_SSLS   *asls;
  const char *armijo_type = TAOLINESEARCHARMIJO;

  PetscFunctionBegin;
  PetscCall(PetscNew(&asls));
  tao->data                = (void *)asls;
  tao->ops->solve          = TaoSolve_ASILS;
  tao->ops->setup          = TaoSetUp_ASILS;
  tao->ops->view           = TaoView_SSLS;
  tao->ops->setfromoptions = TaoSetFromOptions_SSLS;
  tao->ops->destroy        = TaoDestroy_ASILS;
  tao->subset_type         = TAO_SUBSET_SUBVEC;
  asls->delta              = 1e-10;
  asls->rho                = 2.1;
  asls->fixed              = NULL;
  asls->free               = NULL;
  asls->J_sub              = NULL;
  asls->Jpre_sub           = NULL;
  asls->w                  = NULL;
  asls->r1                 = NULL;
  asls->r2                 = NULL;
  asls->r3                 = NULL;
  asls->t1                 = NULL;
  asls->t2                 = NULL;
  asls->dxfree             = NULL;

  asls->identifier = 1e-5;

  PetscCall(TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1));
  PetscCall(TaoLineSearchSetType(tao->linesearch, armijo_type));
  PetscCall(TaoLineSearchSetOptionsPrefix(tao->linesearch, tao->hdr.prefix));
  PetscCall(TaoLineSearchSetFromOptions(tao->linesearch));

  PetscCall(KSPCreate(((PetscObject)tao)->comm, &tao->ksp));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1));
  PetscCall(KSPSetOptionsPrefix(tao->ksp, tao->hdr.prefix));
  PetscCall(KSPSetFromOptions(tao->ksp));

  /* Override default settings (unless already changed) */
  if (!tao->max_it_changed) tao->max_it = 2000;
  if (!tao->max_funcs_changed) tao->max_funcs = 4000;
  if (!tao->gttol_changed) tao->gttol = 0;
  if (!tao->grtol_changed) tao->grtol = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  if (!tao->gatol_changed) tao->gatol = 1.0e-6;
  if (!tao->fmin_changed) tao->fmin = 1.0e-4;
#else
  if (!tao->gatol_changed) tao->gatol = 1.0e-16;
  if (!tao->fmin_changed) tao->fmin = 1.0e-8;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}