Actual source code: ex11.c

petsc-3.4.5 2014-06-29
  2: static char help[] ="Solves the 2-dim Bratu (SFI - solid fuel ignition) test problem, where\n\
  3: analytic formation of the Jacobian is the default.  \n\
  4: \n\
  5:   Solves the linear systems via 2 level additive Schwarz \n\
  6: \n\
  7: The command line\n\
  8: options are:\n\
  9:   -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
 10:      problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\
 11:   -Mx <xg>, where <xg> = number of grid points in the x-direction on coarse grid\n\
 12:   -My <yg>, where <yg> = number of grid points in the y-direction on coarse grid\n\n";
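
/*
   An illustrative invocation (mpiexec and the executable name ./ex11 are
   assumptions, not part of this file):

       mpiexec -n 4 ./ex11 -Mx 5 -My 5 -ratio 2 -par 6.0 -redundant_build -snes_monitor
*/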

 14: /*
 15:     1) Solid Fuel Ignition (SFI) problem.  This problem is modeled by
 16:     the partial differential equation

 18:             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1 ,

 20:     with boundary conditions

 22:              u = 0  for  x = 0, x = 1, y = 0, y = 1.

 24:     A finite difference approximation with the usual 5-point stencil
 25:     is used to discretize the boundary value problem to obtain a nonlinear
 26:     system of equations.
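
        Written out for reference (this restates what FormFunction() below
     assembles): with hx = 1/(mx-1) and hy = 1/(my-1), an interior node (i,j)
     satisfies

        (2*u(i,j) - u(i-1,j) - u(i+1,j))*hy/hx
      + (2*u(i,j) - u(i,j-1) - u(i,j+1))*hx/hy - hx*hy*lambda*exp(u(i,j)) = 0,

     while boundary nodes carry scaled Dirichlet rows enforcing u = 0.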

 28:    The code has two cases for the multilevel solver:
 29:      I. the coarse grid Jacobian is computed in parallel
 30:      II. the coarse grid Jacobian is computed sequentially on each processor
 31:    In both cases the coarse problem is SOLVED redundantly.

 33: */

 35: #include <petscsnes.h>
 36: #include <petscdmda.h>

 38: /* User-defined application contexts */

 40: typedef struct {
 41:   PetscInt mx,my;               /* number of grid points in x and y directions */
 42:   Vec      localX,localF;       /* local vectors with ghost region */
 43:   DM       da;
 44:   Vec      x,b,r;               /* global vectors */
 45:   Mat      J;                   /* Jacobian on grid */
 46: } GridCtx;

 48: typedef struct {
 49:   PetscReal  param;             /* test problem parameter */
 50:   GridCtx    fine;
 51:   GridCtx    coarse;
 52:   KSP        ksp_coarse;
 53:   KSP        ksp_fine;
 54:   PetscInt   ratio;
 55:   Mat        R;                 /* restriction fine to coarse */
 56:   Vec        Rscale;
 57:   PetscBool  redundant_build;   /* build coarse matrix redundantly */
 58:   Vec        localall;          /* contains entire coarse vector on each processor in NATURAL order */
 59:   VecScatter tolocalall;        /* maps from parallel "global" coarse vector to localall */
 60:   VecScatter fromlocalall;      /* maps from localall vector back to global coarse vector */
 61: } AppCtx;

 63: #define COARSE_LEVEL 0
 64: #define FINE_LEVEL   1

 66: extern PetscErrorCode FormFunction(SNES,Vec,Vec,void*), FormInitialGuess1(AppCtx*,Vec);
 67: extern PetscErrorCode FormJacobian(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
 68: extern PetscErrorCode FormInterpolation(AppCtx*);

 70: /*
 71:       ratio - ratio of grid lines between fine and coarse grids (set with -ratio).
 72: */
 75: int main(int argc, char **argv)
 76: {
 77:   SNES           snes;
 78:   AppCtx         user;
 80:   PetscInt       its, N, n, Nx = PETSC_DECIDE, Ny = PETSC_DECIDE, nlocal,Nlocal;
 81:   PetscMPIInt    size;
 82:   double         bratu_lambda_max = 6.81, bratu_lambda_min = 0.;
 83:   KSP            ksp;
 84:   PC             pc;

 86:   /*
 87:       Initialize PETSc; default options are read from the file ex11options and
 88:       can be overridden on the command line
 89:   */
 90:   PetscInitialize(&argc, &argv,"ex11options",help);

 92:   user.ratio     = 2;
 93:   user.coarse.mx = 5; user.coarse.my = 5; user.param = 6.0;
 94:   PetscOptionsGetInt(NULL,"-Mx",&user.coarse.mx,NULL);
 95:   PetscOptionsGetInt(NULL,"-My",&user.coarse.my,NULL);
 96:   PetscOptionsGetInt(NULL,"-ratio",&user.ratio,NULL);
 97:   user.fine.mx   = user.ratio*(user.coarse.mx-1)+1; user.fine.my = user.ratio*(user.coarse.my-1)+1;

 99:   PetscOptionsHasName(NULL,"-redundant_build",&user.redundant_build);
100:   if (user.redundant_build) {
101:     PetscPrintf(PETSC_COMM_WORLD,"Building coarse Jacobian redundantly\n");
102:   }

104:   PetscPrintf(PETSC_COMM_WORLD,"Coarse grid size %D by %D\n",user.coarse.mx,user.coarse.my);
105:   PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",user.fine.mx,user.fine.my);

107:   PetscOptionsGetReal(NULL,"-par",&user.param,NULL);
108:   if (user.param >= bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ(PETSC_COMM_SELF,1,"Lambda is out of range");
109:   n = user.fine.mx*user.fine.my; N = user.coarse.mx*user.coarse.my;

111:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
112:   PetscOptionsGetInt(NULL,"-Nx",&Nx,NULL);
113:   PetscOptionsGetInt(NULL,"-Ny",&Ny,NULL);

115:   /* Set up distributed array for fine grid */
116:   DMDACreate2d(PETSC_COMM_WORLD, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,
117:                       user.fine.my,Nx,Ny,1,1,NULL,NULL,&user.fine.da);
118:   DMCreateGlobalVector(user.fine.da,&user.fine.x);
119:   VecDuplicate(user.fine.x,&user.fine.r);
120:   VecDuplicate(user.fine.x,&user.fine.b);
121:   VecGetLocalSize(user.fine.x,&nlocal);
122:   DMCreateLocalVector(user.fine.da,&user.fine.localX);
123:   VecDuplicate(user.fine.localX,&user.fine.localF);
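     /* preallocate the fine grid Jacobian: 5 nonzeros per row in the diagonal
        block and 3 per row in the off-process block, matching the 5-point stencil */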
124:   MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&user.fine.J);

126:   /* Set up distributed array for coarse grid */
127:   DMDACreate2d(PETSC_COMM_WORLD, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,
128:                       user.coarse.my,Nx,Ny,1,1,NULL,NULL,&user.coarse.da);
129:   DMCreateGlobalVector(user.coarse.da,&user.coarse.x);
130:   VecDuplicate(user.coarse.x,&user.coarse.b);
131:   if (user.redundant_build) {
132:     /* Create scatter from parallel global numbering to redundant with natural ordering */
133:     DMDAGlobalToNaturalAllCreate(user.coarse.da,&user.tolocalall);
134:     DMDANaturalAllToGlobalCreate(user.coarse.da,&user.fromlocalall);
135:     VecCreateSeq(PETSC_COMM_SELF,N,&user.localall);
136:     /* Create sequential matrix to hold entire coarse grid Jacobian on each processor */
137:     MatCreateSeqAIJ(PETSC_COMM_SELF,N,N,5,NULL,&user.coarse.J);
138:   } else {
139:     VecGetLocalSize(user.coarse.x,&Nlocal);
140:     DMCreateLocalVector(user.coarse.da,&user.coarse.localX);
141:     VecDuplicate(user.coarse.localX,&user.coarse.localF);
142:     /* We will compute the coarse Jacobian in parallel */
143:     MatCreateAIJ(PETSC_COMM_WORLD,Nlocal,Nlocal,N,N,5,NULL,3,NULL,&user.coarse.J);
144:   }

146:   /* Create nonlinear solver */
147:   SNESCreate(PETSC_COMM_WORLD,&snes);

149:   /* provide user function and Jacobian */
150:   SNESSetFunction(snes,user.fine.b,FormFunction,&user);
151:   SNESSetJacobian(snes,user.fine.J,user.fine.J,FormJacobian,&user);

153:   /* set two level additive Schwarz preconditioner */
154:   SNESGetKSP(snes,&ksp);
155:   KSPGetPC(ksp,&pc);
156:   PCSetType(pc,PCMG);
157:   PCMGSetLevels(pc,2,NULL);
158:   PCMGSetType(pc,PC_MG_ADDITIVE);
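     /* roughly equivalent run-time options: -pc_type mg -pc_mg_levels 2 -pc_mg_type additive */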

160:   /* always solve the coarse problem redundantly with direct LU solver */
161:   PetscOptionsSetValue("-coarse_pc_type","redundant");
162:   PetscOptionsSetValue("-coarse_redundant_pc_type","lu");
163:   PetscOptionsSetValue("-coarse_redundant_ksp_type","preonly");

165:   /* Create coarse level */
166:   PCMGGetCoarseSolve(pc,&user.ksp_coarse);
167:   KSPSetOptionsPrefix(user.ksp_coarse,"coarse_");
168:   KSPSetFromOptions(user.ksp_coarse);
169:   KSPSetOperators(user.ksp_coarse,user.coarse.J,user.coarse.J,DIFFERENT_NONZERO_PATTERN);
170:   PCMGSetX(pc,COARSE_LEVEL,user.coarse.x);
171:   PCMGSetRhs(pc,COARSE_LEVEL,user.coarse.b);
172:   if (user.redundant_build) {
173:     PC rpc;
174:     KSPGetPC(user.ksp_coarse,&rpc);
175:     PCRedundantSetScatter(rpc,user.tolocalall,user.fromlocalall);
176:   }

178:   /* Create fine level */
179:   PCMGGetSmoother(pc,FINE_LEVEL,&user.ksp_fine);
180:   KSPSetOptionsPrefix(user.ksp_fine,"fine_");
181:   KSPSetFromOptions(user.ksp_fine);
182:   KSPSetOperators(user.ksp_fine,user.fine.J,user.fine.J,DIFFERENT_NONZERO_PATTERN);
183:   PCMGSetR(pc,FINE_LEVEL,user.fine.r);
184:   PCMGSetResidual(pc,FINE_LEVEL,NULL,user.fine.J);

186:   /* Create interpolation between the levels */
187:   FormInterpolation(&user);
188:   PCMGSetInterpolation(pc,FINE_LEVEL,user.R);
189:   PCMGSetRestriction(pc,FINE_LEVEL,user.R);

191:   /* Set options, then solve nonlinear system */
192:   SNESSetFromOptions(snes);
193:   FormInitialGuess1(&user,user.fine.x);
194:   SNESSolve(snes,NULL,user.fine.x);
195:   SNESGetIterationNumber(snes,&its);
196:   PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n", its);

198:   /* Free data structures */
199:   if (user.redundant_build) {
200:     VecScatterDestroy(&user.tolocalall);
201:     VecScatterDestroy(&user.fromlocalall);
202:     VecDestroy(&user.localall);
203:   } else {
204:     VecDestroy(&user.coarse.localX);
205:     VecDestroy(&user.coarse.localF);
206:   }

208:   MatDestroy(&user.fine.J);
209:   VecDestroy(&user.fine.x);
210:   VecDestroy(&user.fine.r);
211:   VecDestroy(&user.fine.b);
212:   DMDestroy(&user.fine.da);
213:   VecDestroy(&user.fine.localX);
214:   VecDestroy(&user.fine.localF);

216:   MatDestroy(&user.coarse.J);
217:   VecDestroy(&user.coarse.x);
218:   VecDestroy(&user.coarse.b);
219:   DMDestroy(&user.coarse.da);

221:   SNESDestroy(&snes);
222:   MatDestroy(&user.R);
223:   VecDestroy(&user.Rscale);
224:   PetscFinalize();

226:   return 0;
227: } /* --------------------  Form initial approximation ----------------- */
230: PetscErrorCode FormInitialGuess1(AppCtx *user,Vec X)
231: {
232:   PetscInt       i, j, row, mx, my, xs, ys, xm, ym, Xm, Ym, Xs, Ys;
234:   double         one = 1.0, lambda, temp1, temp, hx, hy;
235:   /* double hxdhy, hydhx,sc; */
236:   PetscScalar *x;
237:   Vec         localX = user->fine.localX;

239:   mx = user->fine.mx;       my = user->fine.my;            lambda = user->param;
240:   hx = one/(double)(mx-1);  hy = one/(double)(my-1);
241:   /* sc = hx*hy*lambda;        hxdhy = hx/hy;            hydhx = hy/hx; */

243:   temp1 = lambda/(lambda + one);

245:   /* Get ghost points */
246:   DMDAGetCorners(user->fine.da,&xs,&ys,0,&xm,&ym,0);
247:   DMDAGetGhostCorners(user->fine.da,&Xs,&Ys,0,&Xm,&Ym,0);
248:   VecGetArray(localX,&x);

250:   /* Compute initial guess */
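     /* interior nodes get lambda/(lambda+1) * sqrt(min(distance to the nearest
        x-boundary, distance to the nearest y-boundary)); boundary nodes are zero */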
251:   for (j=ys; j<ys+ym; j++) {
252:     temp = (double)(PetscMin(j,my-j-1))*hy;
253:     for (i=xs; i<xs+xm; i++) {
254:       row = i - Xs + (j - Ys)*Xm;
255:       if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
256:         x[row] = 0.0;
257:         continue;
258:       }
259:       x[row] = temp1*PetscSqrtReal(PetscMin((double)(PetscMin(i,mx-i-1))*hx,temp));
260:     }
261:   }
262:   VecRestoreArray(localX,&x);

264:   /* Insert values into global vector */
265:   DMLocalToGlobalBegin(user->fine.da,localX,INSERT_VALUES,X);
266:   DMLocalToGlobalEnd(user->fine.da,localX,INSERT_VALUES,X);
267:   return 0;
268: }

270: /* --------------------  Evaluate Function F(x) --------------------- */
273: PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
274: {
275:   AppCtx         *user = (AppCtx*) ptr;
276:   PetscInt       i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym;
278:   double         two = 2.0, one = 1.0, lambda,hx, hy, hxdhy, hydhx,sc;
279:   PetscScalar    u, uxx, uyy, *x,*f;
280:   Vec            localX = user->fine.localX, localF = user->fine.localF;

282:   mx = user->fine.mx;       my = user->fine.my;       lambda = user->param;
283:   hx = one/(double)(mx-1);  hy = one/(double)(my-1);
284:   sc = hx*hy*lambda;        hxdhy = hx/hy;            hydhx = hy/hx;

286:   /* Get ghost points */
287:   DMGlobalToLocalBegin(user->fine.da,X,INSERT_VALUES,localX);
288:   DMGlobalToLocalEnd(user->fine.da,X,INSERT_VALUES,localX);
289:   DMDAGetCorners(user->fine.da,&xs,&ys,0,&xm,&ym,0);
290:   DMDAGetGhostCorners(user->fine.da,&Xs,&Ys,0,&Xm,&Ym,0);
291:   VecGetArray(localX,&x);
292:   VecGetArray(localF,&f);

294:   /* Evaluate function */
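     /* interior nodes get the discretized Bratu equation; boundary nodes get
        scaled Dirichlet rows (factor .5 on edges, .25 at corners) enforcing u = 0 */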
295:   for (j=ys; j<ys+ym; j++) {
296:     row = (j - Ys)*Xm + xs - Xs - 1;
297:     for (i=xs; i<xs+xm; i++) {
298:       row++;
299:       if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
300:         u      = x[row];
301:         uxx    = (two*u - x[row-1] - x[row+1])*hydhx;
302:         uyy    = (two*u - x[row-Xm] - x[row+Xm])*hxdhy;
303:         f[row] = uxx + uyy - sc*exp(u);
304:       } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
305:         f[row] = .5*two*(hydhx + hxdhy)*x[row];
306:       } else {
307:         f[row] = .25*two*(hydhx + hxdhy)*x[row];
308:       }
309:     }
310:   }
311:   VecRestoreArray(localX,&x);
312:   VecRestoreArray(localF,&f);

314:   /* Insert values into global vector */
315:   DMLocalToGlobalBegin(user->fine.da,localF,INSERT_VALUES,F);
316:   DMLocalToGlobalEnd(user->fine.da,localF,INSERT_VALUES,F);
317:   PetscLogFlops(11.0*ym*xm);
318:   return 0;
319: }

321: /*
322:         Computes the part of the Jacobian associated with this processor
323: */
326: PetscErrorCode FormJacobian_Grid(AppCtx *user,GridCtx *grid,Vec X, Mat *J,Mat *B)
327: {
328:   Mat            jac = *J;
330:   PetscInt       i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym, col[5], nloc, *ltog, grow;
331:   PetscScalar    two    = 2.0, one = 1.0, lambda, v[5], hx, hy, hxdhy, hydhx, sc, *x, value;
332:   Vec            localX = grid->localX;

334:   mx = grid->mx;            my = grid->my;            lambda = user->param;
335:   hx = one/(double)(mx-1);  hy = one/(double)(my-1);
336:   sc = hx*hy;               hxdhy = hx/hy;            hydhx = hy/hx;

338:   /* Get ghost points */
339:   DMGlobalToLocalBegin(grid->da,X,INSERT_VALUES,localX);
340:   DMGlobalToLocalEnd(grid->da,X,INSERT_VALUES,localX);
341:   DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
342:   DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
343:   DMDAGetGlobalIndices(grid->da,&nloc,&ltog);
344:   VecGetArray(localX,&x);

346:   /* Evaluate Jacobian of function */
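     /* ltog maps local ghosted indices to PETSc global indices; grow below is
        the global row number of the current grid node */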
347:   for (j=ys; j<ys+ym; j++) {
348:     row = (j - Ys)*Xm + xs - Xs - 1;
349:     for (i=xs; i<xs+xm; i++) {
350:       row++;
351:       grow = ltog[row];
352:       if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
353:         v[0] = -hxdhy; col[0] = ltog[row - Xm];
354:         v[1] = -hydhx; col[1] = ltog[row - 1];
355:         v[2] = two*(hydhx + hxdhy) - sc*lambda*exp(x[row]); col[2] = grow;
356:         v[3] = -hydhx; col[3] = ltog[row + 1];
357:         v[4] = -hxdhy; col[4] = ltog[row + Xm];
358:         MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
359:       } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
360:         value = .5*two*(hydhx + hxdhy);
361:         MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
362:       } else {
363:         value = .25*two*(hydhx + hxdhy);
364:         MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
365:       }
366:     }
367:   }
368:   MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
369:   VecRestoreArray(localX,&x);
370:   MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);

372:   return 0;
373: }

375: /*
376:         Computes the ENTIRE Jacobian associated with the ENTIRE grid sequentially;
377:     this is used when the coarse grid Jacobian is built redundantly.

379:           This is BAD code duplication, since the bulk of this routine is the
380:     same as the routine above

382:        Note the numbering of the rows/columns is the NATURAL numbering
383: */
386: PetscErrorCode FormJacobian_Coarse(AppCtx *user,GridCtx *grid,Vec X, Mat *J,Mat *B)
387: {
388:   Mat            jac = *J;
390:   PetscInt       i, j, row, mx, my, col[5];
391:   PetscScalar    two = 2.0, one = 1.0, lambda, v[5], hx, hy, hxdhy, hydhx, sc, *x, value;

393:   mx = grid->mx;            my = grid->my;            lambda = user->param;
394:   hx = one/(double)(mx-1);  hy = one/(double)(my-1);
395:   sc = hx*hy;               hxdhy = hx/hy;            hydhx = hy/hx;

397:   VecGetArray(X,&x);

399:   /* Evaluate Jacobian of function */
400:   for (j=0; j<my; j++) {
401:     row = j*mx - 1;
402:     for (i=0; i<mx; i++) {
403:       row++;
404:       if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
405:         v[0] = -hxdhy; col[0] = row - mx;
406:         v[1] = -hydhx; col[1] = row - 1;
407:         v[2] = two*(hydhx + hxdhy) - sc*lambda*exp(x[row]); col[2] = row;
408:         v[3] = -hydhx; col[3] = row + 1;
409:         v[4] = -hxdhy; col[4] = row + mx;
410:         MatSetValues(jac,1,&row,5,col,v,INSERT_VALUES);
411:       } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
412:         value = .5*two*(hydhx + hxdhy);
413:         MatSetValues(jac,1,&row,1,&row,&value,INSERT_VALUES);
414:       } else {
415:         value = .25*two*(hydhx + hxdhy);
416:         MatSetValues(jac,1,&row,1,&row,&value,INSERT_VALUES);
417:       }
418:     }
419:   }
420:   MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
421:   VecRestoreArray(X,&x);
422:   MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);

424:   return 0;
425: }

427: /* --------------------  Evaluate Jacobian F'(x) --------------------- */
430: PetscErrorCode FormJacobian(SNES snes,Vec X,Mat *J,Mat *B,MatStructure *flag,void *ptr)
431: {
432:   AppCtx         *user = (AppCtx*) ptr;
434:   KSP            ksp;
435:   PC             pc;
436:   PetscBool      ismg;

438:   *flag = SAME_NONZERO_PATTERN;
439:   FormJacobian_Grid(user,&user->fine,X,J,B);

441:   /* create coarse grid jacobian for preconditioner */
442:   SNESGetKSP(snes,&ksp);
443:   KSPGetPC(ksp,&pc);

445:   PetscObjectTypeCompare((PetscObject)pc,PCMG,&ismg);
446:   if (ismg) {

448:     KSPSetOperators(user->ksp_fine,user->fine.J,user->fine.J,SAME_NONZERO_PATTERN);

450:     /* restrict X to coarse grid */
451:     MatMultTranspose(user->R,X,user->coarse.x);
452:     VecPointwiseMult(user->coarse.x,user->coarse.x,user->Rscale);

454:     /* form Jacobian on coarse grid */
455:     if (user->redundant_build) {
456:       /* get copy of coarse X onto each processor */
457:       VecScatterBegin(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);
458:       VecScatterEnd(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);
459:       FormJacobian_Coarse(user,&user->coarse,user->localall,&user->coarse.J,&user->coarse.J);

461:     } else {
462:       /* coarse grid Jacobian computed in parallel */
463:       FormJacobian_Grid(user,&user->coarse,user->coarse.x,&user->coarse.J,&user->coarse.J);
464:     }
465:     KSPSetOperators(user->ksp_coarse,user->coarse.J,user->coarse.J,SAME_NONZERO_PATTERN);
466:   }

468:   return 0;
469: }


474: /*
475:       Forms the interpolation (and restriction) operator from
476: coarse grid to fine.
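      The same matrix is passed to both PCMGSetInterpolation() and
   PCMGSetRestriction() in main(); PETSc applies its transpose for restriction,
   since the matrix is shaped as interpolation (fine rows, coarse columns).
   The Rscale vector built at the end holds the reciprocal column sums, and
   FormJacobian() uses it to restrict the current fine-grid state by weighted
   averaging.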
477: */
478: PetscErrorCode FormInterpolation(AppCtx *user)
479: {
481:   PetscInt       i,j,i_start,m_fine,j_start,m,n,*idx;
482:   PetscInt       m_ghost,n_ghost,*idx_c,m_ghost_c,n_ghost_c,m_coarse;
483:   PetscInt       row,i_start_ghost,j_start_ghost,cols[4], m_c;
484:   PetscInt       nc,ratio = user->ratio,m_c_local,m_fine_local;
485:   PetscInt       i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col;
486:   PetscScalar    v[4],x,y, one = 1.0;
487:   Mat            mat;
488:   Vec            Rscale;

490:   DMDAGetCorners(user->fine.da,&i_start,&j_start,0,&m,&n,0);
491:   DMDAGetGhostCorners(user->fine.da,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
492:   DMDAGetGlobalIndices(user->fine.da,NULL,&idx);

494:   DMDAGetCorners(user->coarse.da,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
495:   DMDAGetGhostCorners(user->coarse.da,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
496:   DMDAGetGlobalIndices(user->coarse.da,NULL,&idx_c);

498:   /* create interpolation matrix */
499:   VecGetLocalSize(user->fine.x,&m_fine_local);
500:   VecGetLocalSize(user->coarse.x,&m_c_local);
501:   VecGetSize(user->fine.x,&m_fine);
502:   VecGetSize(user->coarse.x,&m_coarse);
503:   MatCreateAIJ(PETSC_COMM_WORLD,m_fine_local,m_c_local,m_fine,m_coarse,
504:                       5,0,3,0,&mat);

506:   /* loop over local fine grid nodes setting interpolation for those*/
507:   for (j=j_start; j<j_start+n; j++) {
508:     for (i=i_start; i<i_start+m; i++) {
509:       /* convert to local "natural" numbering and then to PETSc global numbering */
510:       row = idx[m_ghost*(j-j_start_ghost) + (i-i_start_ghost)];

512:       i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
513:       j_c = (j/ratio);    /* coarse grid node below fine grid node */

515:       /*
516:          Only include those interpolation points that are truly
517:          nonzero. Note this is very important for the final grid lines
518:          in the x and y directions, since they have no right/top neighbors
519:       */
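      /* the four weights set below are the bilinear factors
         (1-x)*(1-y), x*(1-y), (1-x)*y and x*y, written in expanded form */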
520:       x  = ((double)(i - i_c*ratio))/((double)ratio);
521:       y  = ((double)(j - j_c*ratio))/((double)ratio);
522:       nc = 0;
523:       /* one left and below; or we are right on it */
524:       if (j_c < j_start_ghost_c || j_c > j_start_ghost_c+n_ghost_c) SETERRQ3(PETSC_COMM_SELF,1,"Sorry j %D %D %D",j_c,j_start_ghost_c,j_start_ghost_c+n_ghost_c);
525:       if (i_c < i_start_ghost_c || i_c > i_start_ghost_c+m_ghost_c) SETERRQ3(PETSC_COMM_SELF,1,"Sorry i %D %D %D",i_c,i_start_ghost_c,i_start_ghost_c+m_ghost_c);
526:       col      = m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c);
527:       cols[nc] = idx_c[col];
528:       v[nc++]  = x*y - x - y + 1.0;
529:       /* one right and below */
530:       if (i_c*ratio != i) {
531:         cols[nc] = idx_c[col+1];
532:         v[nc++]  = -x*y + x;
533:       }
534:       /* one left and above */
535:       if (j_c*ratio != j) {
536:         cols[nc] = idx_c[col+m_ghost_c];
537:         v[nc++]  = -x*y + y;
538:       }
539:       /* one right and above */
540:       if (j_c*ratio != j && i_c*ratio != i) {
541:         cols[nc] = idx_c[col+m_ghost_c+1];
542:         v[nc++]  = x*y;
543:       }
544:       MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
545:     }
546:   }
547:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
548:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
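     /* the lines below build Rscale = 1./(R^T * ones), the reciprocal column sums
        of the interpolation matrix; FormJacobian() multiplies the restricted state
        by Rscale so that the transpose product becomes a weighted average */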

550:   VecDuplicate(user->coarse.x,&Rscale);
551:   VecSet(user->fine.x,one);
552:   MatMultTranspose(mat,user->fine.x,Rscale);
553:   VecReciprocal(Rscale);
554:   user->Rscale = Rscale;
555:   user->R      = mat;
556:   return 0;
557: }