Actual source code: asfls.c
1: #include <../src/tao/complementarity/impls/ssls/ssls.h>
2: /*
3: Context for ASXLS (X = F for the feasible variant implemented here, I for the infeasible variant)
4: -- active-set      - reduced matrices formed
5:                      - inherit properties of original system
6: -- semismooth (S)  - function not differentiable
7:                    - merit function continuously differentiable
8:                    - Fischer-Burmeister reformulation of complementarity
9:                      - Billups composition for two finite bounds
10: -- infeasible (I)  - iterates not guaranteed to remain within bounds
11: -- feasible (F)    - iterates guaranteed to remain within bounds
12: -- linesearch (LS) - Armijo rule on direction
14: Many other reformulations are possible, as are combinations of
15: feasible/infeasible and linesearch/trust region methods.
17: Basic theory
18: The Fischer-Burmeister reformulation is semismooth with a continuously
19: differentiable merit function, and strongly semismooth if F has
20: Lipschitz continuous derivatives.
22: Every accumulation point generated by the algorithm is a stationary
23: point for the merit function. Stationary points of the merit function
24: are solutions of the complementarity problem if
25: a. the stationary point has a BD-regular subdifferential, or
26: b. the Schur complement F'/F'_ff is a P_0-matrix where ff is the
27: index set corresponding to the free variables.
29: If one of the accumulation points has a BD-regular subdifferential then
30: a. the entire sequence converges to this accumulation point at
31: a local q-superlinear rate
32: b. if in addition the reformulation is strongly semismooth near
33: this accumulation point, then the algorithm converges at a
34: local q-quadratic rate.
36: The theory for the feasible version follows from the feasible descent
37: algorithm framework.
39: References:
40: Billups, "Algorithms for Complementarity Problems and Generalized
41: Equations," Ph.D. thesis, University of Wisconsin-Madison, 1995.
42: De Luca, Facchinei, Kanzow, "A Semismooth Equation Approach to the
43: Solution of Nonlinear Complementarity Problems," Mathematical
44: Programming, 75, pages 407-439, 1996.
45: Ferris, Kanzow, Munson, "Feasible Descent Algorithms for Mixed
46: Complementarity Problems," Mathematical Programming, 86,
47: pages 475-497, 1999.
48: Fischer, "A Special Newton-type Optimization Method," Optimization,
49: 24, 1992.
50: Munson, Facchinei, Ferris, Fischer, Kanzow, "The Semismooth Algorithm
51: for Large Scale Complementarity Problems," Technical Report,
52: University of Wisconsin-Madison, 1999.
53: */
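/* Illustrative sketch (not part of the solver): the scalar Fischer-Burmeister
   function referred to above.  The helper name FischerBurmeisterSketch is
   hypothetical; the library routine actually used below is the vectorized
   VecFischer.  The key property is that phi(a,b) = 0 exactly when
   a >= 0, b >= 0, and a*b = 0, so driving the reformulated residual to zero
   enforces the complementarity conditions. */
static inline PetscReal FischerBurmeisterSketch(PetscReal a, PetscReal b)
{
  /* phi(a,b) = sqrt(a^2 + b^2) - a - b */
  return PetscSqrtReal(a*a + b*b) - a - b;
}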
55: static PetscErrorCode TaoSetUp_ASFLS(Tao tao)
56: {
57: TAO_SSLS *asls = (TAO_SSLS *)tao->data;
61: VecDuplicate(tao->solution,&tao->gradient);
62: VecDuplicate(tao->solution,&tao->stepdirection);
63: VecDuplicate(tao->solution,&asls->ff);
64: VecDuplicate(tao->solution,&asls->dpsi);
65: VecDuplicate(tao->solution,&asls->da);
66: VecDuplicate(tao->solution,&asls->db);
67: VecDuplicate(tao->solution,&asls->t1);
68: VecDuplicate(tao->solution,&asls->t2);
69: VecDuplicate(tao->solution, &asls->w);
70: asls->fixed = NULL;
71: asls->free = NULL;
72: asls->J_sub = NULL;
73: asls->Jpre_sub = NULL;
74: asls->r1 = NULL;
75: asls->r2 = NULL;
76: asls->r3 = NULL;
77: asls->dxfree = NULL;
78: return(0);
79: }
81: static PetscErrorCode Tao_ASLS_FunctionGradient(TaoLineSearch ls, Vec X, PetscReal *fcn, Vec G, void *ptr)
82: {
83: Tao tao = (Tao)ptr;
84: TAO_SSLS *asls = (TAO_SSLS *)tao->data;
88: TaoComputeConstraints(tao, X, tao->constraints);
89: VecFischer(X,tao->constraints,tao->XL,tao->XU,asls->ff);
90: VecNorm(asls->ff,NORM_2,&asls->merit);
91: *fcn = 0.5*asls->merit*asls->merit;
92: TaoComputeJacobian(tao,tao->solution,tao->jacobian,tao->jacobian_pre);
94: MatDFischer(tao->jacobian, tao->solution, tao->constraints,tao->XL, tao->XU, asls->t1, asls->t2,asls->da, asls->db);
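/* Background (an assumption based on the standard semismooth derivation, not a
   statement from this file): MatDFischer fills da and db so that an element of
   the generalized Jacobian of the Fischer residual ff is D_a + D_b*J, with
   D_a = diag(da) and D_b = diag(db).  The next four calls then assemble the
   merit-function gradient G = D_a*ff + J^T*(D_b*ff). */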
95: VecPointwiseMult(asls->t1, asls->ff, asls->db);
96: MatMultTranspose(tao->jacobian,asls->t1,G);
97: VecPointwiseMult(asls->t1, asls->ff, asls->da);
98: VecAXPY(G,1.0,asls->t1);
99: return(0);
100: }
102: static PetscErrorCode TaoDestroy_ASFLS(Tao tao)
103: {
104: TAO_SSLS *ssls = (TAO_SSLS *)tao->data;
108: VecDestroy(&ssls->ff);
109: VecDestroy(&ssls->dpsi);
110: VecDestroy(&ssls->da);
111: VecDestroy(&ssls->db);
112: VecDestroy(&ssls->w);
113: VecDestroy(&ssls->t1);
114: VecDestroy(&ssls->t2);
115: VecDestroy(&ssls->r1);
116: VecDestroy(&ssls->r2);
117: VecDestroy(&ssls->r3);
118: VecDestroy(&ssls->dxfree);
119: MatDestroy(&ssls->J_sub);
120: MatDestroy(&ssls->Jpre_sub);
121: ISDestroy(&ssls->fixed);
122: ISDestroy(&ssls->free);
123: PetscFree(tao->data);
124: tao->data = NULL;
125: return(0);
126: }
128: static PetscErrorCode TaoSolve_ASFLS(Tao tao)
129: {
130: TAO_SSLS *asls = (TAO_SSLS *)tao->data;
131: PetscReal psi,ndpsi, normd, innerd, t=0;
132: PetscInt nf;
133: PetscErrorCode ierr;
134: TaoLineSearchConvergedReason ls_reason;
137: /* Assume that Setup has been called!
138: Set the structure for the Jacobian and create a linear solver. */
140: TaoComputeVariableBounds(tao);
141: TaoLineSearchSetObjectiveAndGradientRoutine(tao->linesearch,Tao_ASLS_FunctionGradient,tao);
142: TaoLineSearchSetObjectiveRoutine(tao->linesearch,Tao_SSLS_Function,tao);
143: TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);
145: VecMedian(tao->XL, tao->solution, tao->XU, tao->solution);
147: /* Calculate the merit function value and its gradient at the
148: current iterate */
149: TaoLineSearchComputeObjectiveAndGradient(tao->linesearch,tao->solution,&psi,asls->dpsi);
150: VecNorm(asls->dpsi,NORM_2,&ndpsi);
152: tao->reason = TAO_CONTINUE_ITERATING;
153: while (1) {
154: /* Check the converged criteria */
155: PetscInfo3(tao,"iter %D, merit: %g, ||dpsi||: %g\n",tao->niter,(double)asls->merit,(double)ndpsi);
156: TaoLogConvergenceHistory(tao,asls->merit,ndpsi,0.0,tao->ksp_its);
157: TaoMonitor(tao,tao->niter,asls->merit,ndpsi,0.0,t);
158: (*tao->ops->convergencetest)(tao,tao->cnvP);
159: if (TAO_CONTINUE_ITERATING != tao->reason) break;
161: /* Call general purpose update function */
162: if (tao->ops->update) {
163: (*tao->ops->update)(tao, tao->niter, tao->user_update);
164: }
165: tao->niter++;
167: /* We are going to solve a linear system of equations. We need to
168: set the tolerances for the solve so that we maintain an asymptotic
169: rate of convergence that is superlinear.
170: Note: these tolerances are for the reduced system. We really need
171: to make sure that the full system satisfies the full-space conditions.
173: This rule gives superlinear asymptotic convergence
174: asls->atol = min(0.5, asls->merit*sqrt(asls->merit));
175: asls->rtol = 0.0;
177: This rule gives quadratic asymptotic convergence
178: asls->atol = min(0.5, asls->merit*asls->merit);
179: asls->rtol = 0.0;
181: Calculate a free and fixed set of variables. The fixed set of
182: variables consists of those for which d_b is approximately equal to zero.
183: The definition of approximately changes as we approach the solution
184: to the problem.
186: No one rule is guaranteed to work in all cases. The following
187: definition is based on the norm of the Jacobian matrix. If the
188: norm is large, the tolerance becomes smaller. */
189: MatNorm(tao->jacobian,NORM_1,&asls->identifier);
190: asls->identifier = PetscMin(asls->merit, 1e-2) / (1 + asls->identifier);
192: VecSet(asls->t1,-asls->identifier);
193: VecSet(asls->t2, asls->identifier);
195: ISDestroy(&asls->fixed);
196: ISDestroy(&asls->free);
197: VecWhichBetweenOrEqual(asls->t1, asls->db, asls->t2, &asls->fixed);
198: ISComplementVec(asls->fixed,asls->t1, &asls->free);
200: ISGetSize(asls->fixed,&nf);
201: PetscInfo1(tao,"Number of fixed variables: %D\n", nf);
203: /* We now have our partition. Now calculate the direction in the
204: fixed variable space. */
205: TaoVecGetSubVec(asls->ff, asls->fixed, tao->subset_type, 0.0, &asls->r1);
206: TaoVecGetSubVec(asls->da, asls->fixed, tao->subset_type, 1.0, &asls->r2);
207: VecPointwiseDivide(asls->r1,asls->r1,asls->r2);
208: VecSet(tao->stepdirection,0.0);
209: VecISAXPY(tao->stepdirection, asls->fixed, 1.0,asls->r1);
211: /* Our direction in the Fixed Variable Set is fixed. Calculate the
212: information needed for the step in the Free Variable Set. To
213: do this, we need to know the diagonal perturbation and the
214: right hand side. */
216: TaoVecGetSubVec(asls->da, asls->free, tao->subset_type, 0.0, &asls->r1);
217: TaoVecGetSubVec(asls->ff, asls->free, tao->subset_type, 0.0, &asls->r2);
218: TaoVecGetSubVec(asls->db, asls->free, tao->subset_type, 1.0, &asls->r3);
219: VecPointwiseDivide(asls->r1,asls->r1, asls->r3);
220: VecPointwiseDivide(asls->r2,asls->r2, asls->r3);
222: /* r1 is the diagonal perturbation
223: r2 is the right hand side
224: r3 is no longer needed
226: Now need to modify r2 for our direction choice in the fixed
227: variable set: calculate t1 = J*d, take the reduced vector
228: of t1 and modify r2. */
230: MatMult(tao->jacobian, tao->stepdirection, asls->t1);
231: TaoVecGetSubVec(asls->t1,asls->free,tao->subset_type,0.0,&asls->r3);
232: VecAXPY(asls->r2, -1.0, asls->r3);
234: /* Calculate the reduced problem matrix and the direction */
235: TaoMatGetSubMat(tao->jacobian, asls->free, asls->w, tao->subset_type,&asls->J_sub);
236: if (tao->jacobian != tao->jacobian_pre) {
237: TaoMatGetSubMat(tao->jacobian_pre, asls->free, asls->w, tao->subset_type, &asls->Jpre_sub);
238: } else {
239: MatDestroy(&asls->Jpre_sub);
240: asls->Jpre_sub = asls->J_sub;
241: PetscObjectReference((PetscObject)(asls->Jpre_sub));
242: }
243: MatDiagonalSet(asls->J_sub, asls->r1,ADD_VALUES);
244: TaoVecGetSubVec(tao->stepdirection, asls->free, tao->subset_type, 0.0, &asls->dxfree);
245: VecSet(asls->dxfree, 0.0);
247: /* Calculate the reduced direction. (Really negative of Newton
248: direction. Therefore, rest of the code uses -d.) */
249: KSPReset(tao->ksp);
250: KSPSetOperators(tao->ksp, asls->J_sub, asls->Jpre_sub);
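/* A sketch of how the forcing rules quoted in the earlier comment could be imposed
   on this reduced solve (not done in this code; shown only for illustration).  For
   the superlinear rule, absolute tolerance min(0.5, merit^{3/2}) and relative
   tolerance 0:
     KSPSetTolerances(tao->ksp, 0.0, PetscMin(0.5, asls->merit*PetscSqrtReal(asls->merit)), PETSC_DEFAULT, PETSC_DEFAULT);
*/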
251: KSPSolve(tao->ksp, asls->r2, asls->dxfree);
252: KSPGetIterationNumber(tao->ksp,&tao->ksp_its);
253: tao->ksp_tot_its+=tao->ksp_its;
255: /* Add the direction in the free variables back into the real direction. */
256: VecISAXPY(tao->stepdirection, asls->free, 1.0,asls->dxfree);
258: /* Check that the projected real direction is a descent direction; if not,
259: use the negative gradient direction. */
260: VecCopy(tao->stepdirection, asls->w);
261: VecScale(asls->w, -1.0);
262: VecBoundGradientProjection(asls->w, tao->solution, tao->XL, tao->XU, asls->w);
263: VecNorm(asls->w, NORM_2, &normd);
264: VecDot(asls->w, asls->dpsi, &innerd);
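/* Reading of the test below (delta and rho are set in TaoCreate_ASFLS, by default
   1e-10 and 2.1): the projected direction w is accepted only when
   <w, dpsi> < -delta*||w||^rho, i.e. when it is a sufficient descent direction
   for the merit function; otherwise the gradient direction is used instead. */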
266: if (innerd >= -asls->delta*PetscPowReal(normd, asls->rho)) {
267: PetscInfo1(tao,"Gradient direction: %5.4e.\n", (double)innerd);
268: PetscInfo1(tao, "Iteration %D: newton direction not descent\n", tao->niter);
269: VecCopy(asls->dpsi, tao->stepdirection);
270: VecDot(asls->dpsi, tao->stepdirection, &innerd);
271: }
273: VecScale(tao->stepdirection, -1.0);
274: innerd = -innerd;
276: /* We now have a correct descent direction. Apply a linesearch to
277: find the new iterate. */
278: TaoLineSearchSetInitialStepLength(tao->linesearch, 1.0);
279: TaoLineSearchApply(tao->linesearch, tao->solution, &psi,asls->dpsi, tao->stepdirection, &t, &ls_reason);
280: VecNorm(asls->dpsi, NORM_2, &ndpsi);
281: }
282: return(0);
283: }
285: /* ---------------------------------------------------------- */
286: /*MC
287: TAOASFLS - Active-set feasible linesearch algorithm for solving
288: complementarity constraints
290: Options Database Keys:
291: + -tao_ssls_delta - descent test fraction
292: - -tao_ssls_rho - descent test power
294: Level: beginner
295: M*/
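/* Usage sketch (illustrative only; FormConstraints and FormJacobian are hypothetical
   application callbacks, and x, xl, xu, c, J, user are assumed to be created elsewhere;
   function names follow the same PETSc release as this file):

     TaoCreate(PETSC_COMM_WORLD, &tao);
     TaoSetType(tao, TAOASFLS);
     TaoSetInitialVector(tao, x);
     TaoSetVariableBounds(tao, xl, xu);
     TaoSetConstraintsRoutine(tao, c, FormConstraints, &user);
     TaoSetJacobianRoutine(tao, J, J, FormJacobian, &user);
     TaoSetFromOptions(tao);    (picks up -tao_ssls_delta and -tao_ssls_rho)
     TaoSolve(tao);
     TaoDestroy(&tao);
*/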
296: PETSC_EXTERN PetscErrorCode TaoCreate_ASFLS(Tao tao)
297: {
298: TAO_SSLS *asls;
300: const char *armijo_type = TAOLINESEARCHARMIJO;
303: PetscNewLog(tao,&asls);
304: tao->data = (void*)asls;
305: tao->ops->solve = TaoSolve_ASFLS;
306: tao->ops->setup = TaoSetUp_ASFLS;
307: tao->ops->view = TaoView_SSLS;
308: tao->ops->setfromoptions = TaoSetFromOptions_SSLS;
309: tao->ops->destroy = TaoDestroy_ASFLS;
310: tao->subset_type = TAO_SUBSET_SUBVEC;
311: asls->delta = 1e-10;
312: asls->rho = 2.1;
313: asls->fixed = NULL;
314: asls->free = NULL;
315: asls->J_sub = NULL;
316: asls->Jpre_sub = NULL;
317: asls->w = NULL;
318: asls->r1 = NULL;
319: asls->r2 = NULL;
320: asls->r3 = NULL;
321: asls->t1 = NULL;
322: asls->t2 = NULL;
323: asls->dxfree = NULL;
324: asls->identifier = 1e-5;
326: TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);
327: PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);
328: TaoLineSearchSetType(tao->linesearch, armijo_type);
329: TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);
330: TaoLineSearchSetFromOptions(tao->linesearch);
332: KSPCreate(((PetscObject)tao)->comm, &tao->ksp);
333: PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1);
334: KSPSetOptionsPrefix(tao->ksp,tao->hdr.prefix);
335: KSPSetFromOptions(tao->ksp);
337: /* Override default settings (unless already changed) */
338: if (!tao->max_it_changed) tao->max_it = 2000;
339: if (!tao->max_funcs_changed) tao->max_funcs = 4000;
340: if (!tao->gttol_changed) tao->gttol = 0;
341: if (!tao->grtol_changed) tao->grtol = 0;
342: #if defined(PETSC_USE_REAL_SINGLE)
343: if (!tao->gatol_changed) tao->gatol = 1.0e-6;
344: if (!tao->fmin_changed) tao->fmin = 1.0e-4;
345: #else
346: if (!tao->gatol_changed) tao->gatol = 1.0e-16;
347: if (!tao->fmin_changed) tao->fmin = 1.0e-8;
348: #endif
349: return(0);
350: }