Actual source code: rosenbrock1.c

petsc-3.9.4 2018-09-11
/* Program usage: mpiexec -n 1 rosenbrock1 [-help] [all TAO options] */

/*  Include "petsctao.h" so we can use TAO solvers.  */
#include <petsctao.h>

static char help[] = "This example demonstrates use of the TAO package to \n\
solve an unconstrained minimization problem on a single processor.  We \n\
minimize the extended Rosenbrock function: \n\
   sum_{i=0}^{n/2-1} ( alpha*(x_{2i+1}-x_{2i}^2)^2 + (1-x_{2i})^2 ) \n\
or the chained Rosenbrock function:\n\
   sum_{i=0}^{n-2} ( alpha*(x_{i+1} - x_i^2)^2 + (1 - x_i)^2 )\n";
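
/* Both variants are sums of nonnegative terms that vanish when x_i = 1 for all i,
   so the global minimum is f(x) = 0 at x = (1,...,1); the solver should move
   toward that point from the zero initial guess set below. */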

/*T
   Concepts: TAO^Solving an unconstrained minimization problem
   Routines: TaoCreate();
   Routines: TaoSetType(); TaoSetObjectiveAndGradientRoutine();
   Routines: TaoSetHessianRoutine();
   Routines: TaoSetInitialVector();
   Routines: TaoSetFromOptions();
   Routines: TaoSolve();
   Routines: TaoDestroy();
   Processors: 1
T*/




/*
   User-defined application context - contains data needed by the
   application-provided call-back routines that evaluate the function,
   gradient, and Hessian.
*/
typedef struct {
  PetscInt  n;          /* dimension */
  PetscReal alpha;      /* condition parameter */
  PetscBool chained;
} AppCtx;

/* -------------- User-defined routines ---------- */
PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
PetscErrorCode FormHessian(Tao,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  PetscErrorCode     ierr;                  /* used to check for functions returning nonzero error codes */
  PetscReal          zero=0.0;
  Vec                x;                     /* solution vector */
  Mat                H;
  Tao                tao;                   /* Tao solver context */
  PetscBool          flg;
  PetscMPIInt        size;                  /* number of processes running */
  AppCtx             user;                  /* user-defined application context */

  /* Initialize TAO and PETSc */
  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,1,"Incorrect number of processors");

  /* Initialize problem parameters */
  user.n = 2; user.alpha = 99.0; user.chained = PETSC_FALSE;
  /* Check for command line arguments to override defaults */
  PetscOptionsGetInt(NULL,NULL,"-n",&user.n,&flg);
  PetscOptionsGetReal(NULL,NULL,"-alpha",&user.alpha,&flg);
  PetscOptionsGetBool(NULL,NULL,"-chained",&user.chained,&flg);

  /* Allocate vectors for the solution and gradient */
  VecCreateSeq(PETSC_COMM_SELF,user.n,&x);
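  /* The Hessian of the extended Rosenbrock function decouples into independent
     2x2 diagonal blocks (one per variable pair), so a sequential BAIJ matrix
     with block size 2 and one preallocated block per block row matches its
     sparsity pattern exactly.  (The chained variant also couples neighboring
     pairs; PETSc allocates those extra blocks on the fly when they are set.) */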
  MatCreateSeqBAIJ(PETSC_COMM_SELF,2,user.n,user.n,1,NULL,&H);

  /* The TAO code begins here */

  /* Create TAO solver with desired solution method */
  TaoCreate(PETSC_COMM_SELF,&tao);
  TaoSetType(tao,TAOLMVM);

  /* Set solution vec and an initial guess */
  VecSet(x, zero);
  TaoSetInitialVector(tao,x);

  /* Set routines for function, gradient, Hessian evaluation */
  TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);
  TaoSetHessianRoutine(tao,H,H,FormHessian,&user);

  /* Check for TAO command line options */
  TaoSetFromOptions(tao);

  /* SOLVE THE APPLICATION */
  TaoSolve(tao);

  TaoDestroy(&tao);
  VecDestroy(&x);
  MatDestroy(&H);

  ierr = PetscFinalize();
  return ierr;
}

/* -------------------------------------------------------------------- */
/*
    FormFunctionGradient - Evaluates the function, f(X), and gradient, G(X).

    Input Parameters:
.   tao  - the Tao context
.   X    - input vector
.   ptr  - optional user-defined context, as set by TaoSetObjectiveAndGradientRoutine()

    Output Parameters:
.   G - vector containing the newly evaluated gradient
.   f - function value

    Note:
    Some optimization methods ask for the function and the gradient evaluation
    at the same time.  Evaluating both at once may be more efficient than
    evaluating each separately.
*/
PetscErrorCode FormFunctionGradient(Tao tao,Vec X,PetscReal *f, Vec G,void *ptr)
{
  AppCtx            *user = (AppCtx *) ptr;
  PetscInt          i,nn=user->n/2;
  PetscReal         ff=0,t1,t2,alpha=user->alpha;
  PetscScalar       *g;
  const PetscScalar *x;

  /* Get pointers to vector data */
  VecGetArrayRead(X,&x);
  VecGetArray(G,&g);

  /* Compute G(X) */
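  /* For reference, the per-term derivatives implemented by the loops below
     (obtained by differentiating the objective stated in the help string):
       extended form, pair i:  with t1 = x[2i+1] - x[2i]^2 and t2 = 1 - x[2i],
         df/dx[2i]   = -4*alpha*t1*x[2i] - 2*t2
         df/dx[2i+1] =  2*alpha*t1
       chained form, term i:   with t1 = x[i+1] - x[i]^2,
         term i adds -4*alpha*t1*x[i] - 2*(1 - x[i]) to g[i] and 2*alpha*t1 to g[i+1] */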
  if (user->chained) {
    g[0] = 0;
    for (i=0; i<user->n-1; i++) {
      t1 = x[i+1] - x[i]*x[i];
      ff += PetscSqr(1 - x[i]) + alpha*t1*t1;
      g[i] += -2*(1 - x[i]) + 2*alpha*t1*(-2*x[i]);
      g[i+1] = 2*alpha*t1;
    }
  } else {
    for (i=0; i<nn; i++){
      t1 = x[2*i+1]-x[2*i]*x[2*i]; t2= 1-x[2*i];
      ff += alpha*t1*t1 + t2*t2;
      g[2*i]   = -4*alpha*t1*x[2*i]-2.0*t2;
      g[2*i+1] = 2*alpha*t1;
    }
  }

  /* Restore vectors */
  VecRestoreArrayRead(X,&x);
  VecRestoreArray(G,&g);
  *f   = ff;

  PetscLogFlops(nn*15);
  return 0;
}

/* ------------------------------------------------------------------- */
/*
   FormHessian - Evaluates the Hessian matrix.

   Input Parameters:
.  tao   - the Tao context
.  X     - input vector
.  ptr   - optional user-defined context, as set by TaoSetHessianRoutine()

   Output Parameters:
.  H     - Hessian matrix

   Note:  Providing the Hessian may not be necessary.  Only some solvers
   require this matrix.
*/
PetscErrorCode FormHessian(Tao tao,Vec X,Mat H, Mat Hpre, void *ptr)
{
  AppCtx            *user = (AppCtx*)ptr;
  PetscInt          i, ind[2];
  PetscReal         alpha=user->alpha;
  PetscReal         v[2][2];
  const PetscScalar *x;
  PetscBool         assembled;

  /* Zero existing matrix entries */
  MatAssembled(H,&assembled);
  if (assembled) {MatZeroEntries(H);}

  /* Get a pointer to vector data */
  VecGetArrayRead(X,&x);

  /* Compute H(X) entries */
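  /* Per-term second derivatives set below (again derived from the objective
     in the help string):
       extended form, pair i (an independent 2x2 diagonal block):
         d2f/dx[2i]^2       = -4*alpha*(x[2i+1] - 3*x[2i]^2) + 2
         d2f/dx[2i]dx[2i+1] = -4*alpha*x[2i]
         d2f/dx[2i+1]^2     =  2*alpha
       chained form, term i (neighboring 2x2 blocks overlap, hence ADD_VALUES):
         d2f/dx[i]^2        = -4*alpha*(x[i+1] - 3*x[i]^2) + 2
         d2f/dx[i]dx[i+1]   = -4*alpha*x[i]
         d2f/dx[i+1]^2      =  2*alpha */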
  if (user->chained) {
    MatZeroEntries(H);
    for (i=0; i<user->n-1; i++) {
      PetscScalar t1 = x[i+1] - x[i]*x[i];
      v[0][0] = -4*alpha*(x[i+1] - 3*x[i]*x[i]) + 2;
      v[0][1] = 2*alpha*(-2*x[i]);
      v[1][0] = 2*alpha*(-2*x[i]);
      v[1][1] = 2*alpha;
      ind[0] = i; ind[1] = i+1;
      MatSetValues(H,2,ind,2,ind,v[0],ADD_VALUES);
    }
  } else {
    for (i=0; i<user->n/2; i++){
      v[1][1] = 2*alpha;
      v[0][0] = -4*alpha*(x[2*i+1]-3*x[2*i]*x[2*i]) + 2;
      v[1][0] = v[0][1] = -4.0*alpha*x[2*i];
      ind[0]=2*i; ind[1]=2*i+1;
      MatSetValues(H,2,ind,2,ind,v[0],INSERT_VALUES);
    }
  }
  VecRestoreArrayRead(X,&x);

  /* Assemble matrix */
  MatAssemblyBegin(H,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(H,MAT_FINAL_ASSEMBLY);
  PetscLogFlops(9.0*user->n/2.0);
  return 0;
}


/*TEST

   build:
      requires: !complex

   test:
      args: -tao_smonitor -tao_type nls
      requires: !single

   test:
      suffix: 2
      args: -tao_smonitor -tao_type lmvm -tao_gttol 1.e-2

   test:
      suffix: 3
      args: -tao_smonitor -tao_type ntr
      requires: !single

   test:
      suffix: 4
      args: -tao_smonitor -tao_type ntr -tao_mf_hessian -tao_ntr_pc_type none

TEST*/
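
/* A typical way to build and run this example by hand (a sketch, assuming
   PETSC_DIR/PETSC_ARCH are set and the standard PETSc tutorial makefile is
   present in this directory):

      make rosenbrock1
      mpiexec -n 1 ./rosenbrock1 -tao_smonitor -tao_type lmvm -tao_gttol 1.e-2
*/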