Actual source code: dainterp.c

petsc-3.8.4 2018-03-24

  2: /*
  3:   Code for interpolating between grids represented by DMDAs
  4: */

  6: /*
  7:       For linear elements there are two branches of code that compute the interpolation; in current tests they produce identical results. The "new version" does
  8:    not handle periodic domains, while the old one does. Change NEWVERSION to 1 to compile in the new version. Once we are certain the two always produce identical
  9:    results we will remove or merge the new version. NEWVERSION is kept in the code for now because some consider it cleaner, but the old version is enabled
 10:    since it handles the periodic case.
 11: */
 12: #define NEWVERSION 0

 14:  #include <petsc/private/dmdaimpl.h>

 16: /*@
 17:     DMCreateInterpolationScale - Forms L = 1/(R*1), where 1 is the fine grid vector of all ones; L.*(R*v) is then like a
 18:       coarse grid average of the fine grid values of v near each coarse grid point.

 20:   Input Parameters:
 21: +      dac - DM that defines a coarse mesh
 22: .      daf - DM that defines a fine mesh
 23: -      mat - the restriction (or interpolation) operator from the fine grid to the coarse grid

 25:   Output Parameter:
 26: .    scale - the scaling vector, 1/(R*1)

 28:   Level: developer

 30: .seealso: DMCreateInterpolation()

 32: @*/
 33: PetscErrorCode  DMCreateInterpolationScale(DM dac,DM daf,Mat mat,Vec *scale)
 34: {
 36:   Vec            fine;
 37:   PetscScalar    one = 1.0;

 40:   DMCreateGlobalVector(daf,&fine);
 41:   DMCreateGlobalVector(dac,scale);
 42:   VecSet(fine,one);
 43:   MatRestrict(mat,fine,*scale);
 44:   VecDestroy(&fine);
 45:   VecReciprocal(*scale);
 46:   return(0);
 47: }
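
/*
   A minimal usage sketch (not part of the original PETSc source): combining the scale vector
   produced above with MatRestrict() so that the result behaves like a coarse grid average of
   the nearby fine grid values. The names ExampleCoarseAverage, uf and uc are illustrative
   assumptions; interp is assumed to be the interpolation from DMCreateInterpolation(), uf a
   global vector of daf and uc a global vector of dac.
*/
static PetscErrorCode ExampleCoarseAverage(DM dac,DM daf,Mat interp,Vec uf,Vec uc)
{
  Vec scale;

  DMCreateInterpolationScale(dac,daf,interp,&scale);
  MatRestrict(interp,uf,uc);       /* uc = R*uf, weighted sums of nearby fine grid values */
  VecPointwiseMult(uc,uc,scale);   /* multiply by 1/(R*1), turning the sums into averages */
  VecDestroy(&scale);
  return(0);
}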

 49: PetscErrorCode DMCreateInterpolation_DA_1D_Q1(DM dac,DM daf,Mat *A)
 50: {
 51:   PetscErrorCode         ierr;
 52:   PetscInt               i,i_start,m_f,Mx;
 53:   const PetscInt         *idx_f,*idx_c;
 54:   PetscInt               m_ghost,m_ghost_c;
 55:   PetscInt               row,col,i_start_ghost,mx,m_c,nc,ratio;
 56:   PetscInt               i_c,i_start_c,i_start_ghost_c,cols[2],dof;
 57:   PetscScalar            v[2],x;
 58:   Mat                    mat;
 59:   DMBoundaryType         bx;
 60:   ISLocalToGlobalMapping ltog_f,ltog_c;


 64:   DMDAGetInfo(dac,0,&Mx,0,0,0,0,0,0,0,&bx,0,0,0);
 65:   DMDAGetInfo(daf,0,&mx,0,0,0,0,0,&dof,0,0,0,0,0);
 66:   if (bx == DM_BOUNDARY_PERIODIC) {
 67:     ratio = mx/Mx;
 68:     if (ratio*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
 69:   } else {
 70:     ratio = (mx-1)/(Mx-1);
 71:     if (ratio*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
 72:   }

 74:   DMDAGetCorners(daf,&i_start,0,0,&m_f,0,0);
 75:   DMDAGetGhostCorners(daf,&i_start_ghost,0,0,&m_ghost,0,0);
 76:   DMGetLocalToGlobalMapping(daf,&ltog_f);
 77:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

 79:   DMDAGetCorners(dac,&i_start_c,0,0,&m_c,0,0);
 80:   DMDAGetGhostCorners(dac,&i_start_ghost_c,0,0,&m_ghost_c,0,0);
 81:   DMGetLocalToGlobalMapping(dac,&ltog_c);
 82:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

 84:   /* create interpolation matrix */
 85:   MatCreate(PetscObjectComm((PetscObject)dac),&mat);
 86:   MatSetSizes(mat,m_f,m_c,mx,Mx);
 87:   MatSetType(mat,MATAIJ);
 88:   MatSeqAIJSetPreallocation(mat,2,NULL);
 89:   MatMPIAIJSetPreallocation(mat,2,NULL,1,NULL);

 91:   /* loop over local fine grid nodes setting interpolation for those*/
 92:   if (!NEWVERSION) {

 94:     for (i=i_start; i<i_start+m_f; i++) {
 95:       /* convert to local "natural" numbering and then to PETSc global numbering */
 96:       row = idx_f[i-i_start_ghost];

 98:       i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
 99:       if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
100:                                           i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

102:       /*
103:        Only include those interpolation points that are truly
104:        nonzero. Note this is very important for final grid lines
 105:        in the x direction, since they have no right neighbor
106:        */
107:       x  = ((PetscReal)(i - i_c*ratio))/((PetscReal)ratio);
108:       nc = 0;
109:       /* one left and below; or we are right on it */
110:       col      = (i_c-i_start_ghost_c);
111:       cols[nc] = idx_c[col];
112:       v[nc++]  = -x + 1.0;
113:       /* one right? */
114:       if (i_c*ratio != i) {
115:         cols[nc] = idx_c[col+1];
116:         v[nc++]  = x;
117:       }
118:       MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
119:     }

121:   } else {
122:     PetscScalar *xi;
123:     PetscInt    li,nxi,n;
124:     PetscScalar Ni[2];

126:     /* compute local coordinate arrays */
127:     nxi  = ratio + 1;
128:     PetscMalloc1(nxi,&xi);
129:     for (li=0; li<nxi; li++) {
130:       xi[li] = -1.0 + (PetscScalar)li*(2.0/(PetscScalar)(nxi-1));
131:     }

133:     for (i=i_start; i<i_start+m_f; i++) {
134:       /* convert to local "natural" numbering and then to PETSc global numbering */
135:       row = idx_f[(i-i_start_ghost)];

137:       i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
138:       if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
139:                                           i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

141:       /* remainders */
142:       li = i - ratio * (i/ratio);
143:       if (i==mx-1) li = nxi-1;

145:       /* corners */
146:       col     = (i_c-i_start_ghost_c);
147:       cols[0] = idx_c[col];
148:       Ni[0]   = 1.0;
149:       if ((li==0) || (li==nxi-1)) {
150:         MatSetValue(mat,row,cols[0],Ni[0],INSERT_VALUES);
151:         continue;
152:       }

154:       /* edges + interior */
155:       /* remainders */
156:       if (i==mx-1) i_c--;

158:       col     = (i_c-i_start_ghost_c);
159:       cols[0] = idx_c[col]; /* one left and below; or we are right on it */
160:       cols[1] = idx_c[col+1];

162:       Ni[0] = 0.5*(1.0-xi[li]);
163:       Ni[1] = 0.5*(1.0+xi[li]);
164:       for (n=0; n<2; n++) {
165:         if (PetscAbsScalar(Ni[n])<1.0e-32) cols[n]=-1;
166:       }
167:       MatSetValues(mat,1,&row,2,cols,Ni,INSERT_VALUES);
168:     }
169:     PetscFree(xi);
170:   }
171:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
172:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
173:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
174:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
175:   MatCreateMAIJ(mat,dof,A);
176:   MatDestroy(&mat);
177:   return(0);
178: }
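
/*
   An illustrative sketch (not part of the original source) of the weights the routine above
   produces for a non-periodic refinement by a factor of 2: with Mx = 3 coarse points and
   mx = 5 fine points, fine points that coincide with coarse points get the single weight 1.0
   and the in-between fine points get 0.5 from each of their two coarse neighbors. The grid
   sizes are assumptions chosen only to keep the example small.
*/
static PetscErrorCode ExampleInterpolation1DQ1(MPI_Comm comm)
{
  DM  dac,daf;
  Mat A;

  DMDACreate1d(comm,DM_BOUNDARY_NONE,3,1,1,NULL,&dac);
  DMSetUp(dac);
  DMDACreate1d(comm,DM_BOUNDARY_NONE,5,1,1,NULL,&daf);
  DMSetUp(daf);
  DMCreateInterpolation(dac,daf,&A,NULL);   /* reaches DMCreateInterpolation_DA_1D_Q1() */
  MatView(A,PETSC_VIEWER_STDOUT_WORLD);     /* rows: [1 0 0] [.5 .5 0] [0 1 0] [0 .5 .5] [0 0 1] */
  MatDestroy(&A);
  DMDestroy(&daf);
  DMDestroy(&dac);
  return(0);
}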

180: PetscErrorCode DMCreateInterpolation_DA_1D_Q0(DM dac,DM daf,Mat *A)
181: {
182:   PetscErrorCode         ierr;
183:   PetscInt               i,i_start,m_f,Mx;
184:   const PetscInt         *idx_f,*idx_c;
185:   ISLocalToGlobalMapping ltog_f,ltog_c;
186:   PetscInt               m_ghost,m_ghost_c;
187:   PetscInt               row,col,i_start_ghost,mx,m_c,nc,ratio;
188:   PetscInt               i_c,i_start_c,i_start_ghost_c,cols[2],dof;
189:   PetscScalar            v[2],x;
190:   Mat                    mat;
191:   DMBoundaryType         bx;

194:   DMDAGetInfo(dac,0,&Mx,0,0,0,0,0,0,0,&bx,0,0,0);
195:   DMDAGetInfo(daf,0,&mx,0,0,0,0,0,&dof,0,0,0,0,0);
196:   if (bx == DM_BOUNDARY_PERIODIC) {
197:     ratio = mx/Mx;
198:     if (ratio*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
199:   } else {
200:     ratio = (mx-1)/(Mx-1);
201:     if (ratio*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
202:   }

204:   DMDAGetCorners(daf,&i_start,0,0,&m_f,0,0);
205:   DMDAGetGhostCorners(daf,&i_start_ghost,0,0,&m_ghost,0,0);
206:   DMGetLocalToGlobalMapping(daf,&ltog_f);
207:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

209:   DMDAGetCorners(dac,&i_start_c,0,0,&m_c,0,0);
210:   DMDAGetGhostCorners(dac,&i_start_ghost_c,0,0,&m_ghost_c,0,0);
211:   DMGetLocalToGlobalMapping(dac,&ltog_c);
212:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

214:   /* create interpolation matrix */
215:   MatCreate(PetscObjectComm((PetscObject)dac),&mat);
216:   MatSetSizes(mat,m_f,m_c,mx,Mx);
217:   MatSetType(mat,MATAIJ);
218:   MatSeqAIJSetPreallocation(mat,2,NULL);
219:   MatMPIAIJSetPreallocation(mat,2,NULL,0,NULL);

221:   /* loop over local fine grid nodes setting interpolation for those*/
222:   for (i=i_start; i<i_start+m_f; i++) {
223:     /* convert to local "natural" numbering and then to PETSc global numbering */
224:     row = idx_f[(i-i_start_ghost)];

226:     i_c = (i/ratio);    /* coarse grid node to left of fine grid node */

228:     /*
229:          Only include those interpolation points that are truly
230:          nonzero. Note this is very important for final grid lines
 231:          in the x direction, since they have no right neighbor
232:     */
233:     x  = ((PetscReal)(i - i_c*ratio))/((PetscReal)ratio);
234:     nc = 0;
235:     /* one left and below; or we are right on it */
236:     col      = (i_c-i_start_ghost_c);
237:     cols[nc] = idx_c[col];
238:     v[nc++]  = -x + 1.0;
239:     /* one right? */
240:     if (i_c*ratio != i) {
241:       cols[nc] = idx_c[col+1];
242:       v[nc++]  = x;
243:     }
244:     MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
245:   }
246:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
247:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
248:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
249:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
250:   MatCreateMAIJ(mat,dof,A);
251:   MatDestroy(&mat);
252:   PetscLogFlops(5.0*m_f);
253:   return(0);
254: }

256: PetscErrorCode DMCreateInterpolation_DA_2D_Q1(DM dac,DM daf,Mat *A)
257: {
258:   PetscErrorCode         ierr;
259:   PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof;
260:   const PetscInt         *idx_c,*idx_f;
261:   ISLocalToGlobalMapping ltog_f,ltog_c;
262:   PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c,*dnz,*onz;
263:   PetscInt               row,col,i_start_ghost,j_start_ghost,cols[4],mx,m_c,my,nc,ratioi,ratioj;
264:   PetscInt               i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col_shift,col_scale;
265:   PetscMPIInt            size_c,size_f,rank_f;
266:   PetscScalar            v[4],x,y;
267:   Mat                    mat;
268:   DMBoundaryType         bx,by;

271:   DMDAGetInfo(dac,0,&Mx,&My,0,0,0,0,0,0,&bx,&by,0,0);
272:   DMDAGetInfo(daf,0,&mx,&my,0,0,0,0,&dof,0,0,0,0,0);
273:   if (bx == DM_BOUNDARY_PERIODIC) {
274:     ratioi = mx/Mx;
275:     if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
276:   } else {
277:     ratioi = (mx-1)/(Mx-1);
278:     if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
279:   }
280:   if (by == DM_BOUNDARY_PERIODIC) {
281:     ratioj = my/My;
282:     if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My  must be integer: my %D My %D",my,My);
283:   } else {
284:     ratioj = (my-1)/(My-1);
285:     if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
286:   }


289:   DMDAGetCorners(daf,&i_start,&j_start,0,&m_f,&n_f,0);
290:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
291:   DMGetLocalToGlobalMapping(daf,&ltog_f);
292:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

294:   DMDAGetCorners(dac,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
295:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
296:   DMGetLocalToGlobalMapping(dac,&ltog_c);
297:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

299:   /*
 300:    Used for handling a coarse DMDA that lives on 1/4 of the processors of the fine DMDA.
 301:    The coarse vector is then duplicated 4 times (each copy living on 1/4 of the
 302:    processors). Its effective length is hence 4 times its normal length; this is
 303:    why col_scale multiplies the interpolation matrix column sizes.
 304:    col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
 305:    copy of the coarse vector. A bit of a hack, but it is hard to do better.

 307:    In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0
308:    */
309:   MPI_Comm_size(PetscObjectComm((PetscObject)dac),&size_c);
310:   MPI_Comm_size(PetscObjectComm((PetscObject)daf),&size_f);
311:   MPI_Comm_rank(PetscObjectComm((PetscObject)daf),&rank_f);
312:   col_scale = size_f/size_c;
313:   col_shift = Mx*My*(rank_f/size_c);

315:   MatPreallocateInitialize(PetscObjectComm((PetscObject)daf),m_f*n_f,col_scale*m_c*n_c,dnz,onz);
316:   for (j=j_start; j<j_start+n_f; j++) {
317:     for (i=i_start; i<i_start+m_f; i++) {
318:       /* convert to local "natural" numbering and then to PETSc global numbering */
319:       row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

321:       i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
322:       j_c = (j/ratioj);    /* coarse grid node below fine grid node */

324:       if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
325:                                           j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
326:       if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
327:                                           i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

329:       /*
330:        Only include those interpolation points that are truly
331:        nonzero. Note this is very important for final grid lines
 332:        in the x and y directions, since they have no right/top neighbors
333:        */
334:       nc = 0;
335:       /* one left and below; or we are right on it */
336:       col        = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
337:       cols[nc++] = col_shift + idx_c[col];
338:       /* one right and below */
339:       if (i_c*ratioi != i) cols[nc++] = col_shift + idx_c[col+1];
340:       /* one left and above */
341:       if (j_c*ratioj != j) cols[nc++] = col_shift + idx_c[col+m_ghost_c];
342:       /* one right and above */
343:       if (i_c*ratioi != i && j_c*ratioj != j) cols[nc++] = col_shift + idx_c[col+(m_ghost_c+1)];
344:       MatPreallocateSet(row,nc,cols,dnz,onz);
345:     }
346:   }
347:   MatCreate(PetscObjectComm((PetscObject)daf),&mat);
348:   MatSetSizes(mat,m_f*n_f,col_scale*m_c*n_c,mx*my,col_scale*Mx*My);
349:   MatSetType(mat,MATAIJ);
350:   MatSeqAIJSetPreallocation(mat,0,dnz);
351:   MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
352:   MatPreallocateFinalize(dnz,onz);

354:   /* loop over local fine grid nodes setting interpolation for those*/
355:   if (!NEWVERSION) {

357:     for (j=j_start; j<j_start+n_f; j++) {
358:       for (i=i_start; i<i_start+m_f; i++) {
359:         /* convert to local "natural" numbering and then to PETSc global numbering */
360:         row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

362:         i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
363:         j_c = (j/ratioj);    /* coarse grid node below fine grid node */

365:         /*
366:          Only include those interpolation points that are truly
367:          nonzero. Note this is very important for final grid lines
 368:          in the x and y directions, since they have no right/top neighbors
369:          */
370:         x = ((PetscReal)(i - i_c*ratioi))/((PetscReal)ratioi);
371:         y = ((PetscReal)(j - j_c*ratioj))/((PetscReal)ratioj);
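        /* the four weights below are the bilinear shape functions (1-x)*(1-y), x*(1-y), (1-x)*y and x*y, written in expanded form */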

373:         nc = 0;
374:         /* one left and below; or we are right on it */
375:         col      = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
376:         cols[nc] = col_shift + idx_c[col];
377:         v[nc++]  = x*y - x - y + 1.0;
378:         /* one right and below */
379:         if (i_c*ratioi != i) {
380:           cols[nc] = col_shift + idx_c[col+1];
381:           v[nc++]  = -x*y + x;
382:         }
383:         /* one left and above */
384:         if (j_c*ratioj != j) {
385:           cols[nc] = col_shift + idx_c[col+m_ghost_c];
386:           v[nc++]  = -x*y + y;
387:         }
388:         /* one right and above */
389:         if (j_c*ratioj != j && i_c*ratioi != i) {
390:           cols[nc] = col_shift + idx_c[col+(m_ghost_c+1)];
391:           v[nc++]  = x*y;
392:         }
393:         MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
394:       }
395:     }

397:   } else {
398:     PetscScalar Ni[4];
399:     PetscScalar *xi,*eta;
400:     PetscInt    li,nxi,lj,neta;

402:     /* compute local coordinate arrays */
403:     nxi  = ratioi + 1;
404:     neta = ratioj + 1;
405:     PetscMalloc1(nxi,&xi);
406:     PetscMalloc1(neta,&eta);
407:     for (li=0; li<nxi; li++) {
408:       xi[li] = -1.0 + (PetscScalar)li*(2.0/(PetscScalar)(nxi-1));
409:     }
410:     for (lj=0; lj<neta; lj++) {
411:       eta[lj] = -1.0 + (PetscScalar)lj*(2.0/(PetscScalar)(neta-1));
412:     }

414:     /* loop over local fine grid nodes setting interpolation for those*/
415:     for (j=j_start; j<j_start+n_f; j++) {
416:       for (i=i_start; i<i_start+m_f; i++) {
417:         /* convert to local "natural" numbering and then to PETSc global numbering */
418:         row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

420:         i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
421:         j_c = (j/ratioj);    /* coarse grid node below fine grid node */

423:         /* remainders */
424:         li = i - ratioi * (i/ratioi);
425:         if (i==mx-1) li = nxi-1;
426:         lj = j - ratioj * (j/ratioj);
427:         if (j==my-1) lj = neta-1;

429:         /* corners */
430:         col     = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
431:         cols[0] = col_shift + idx_c[col]; /* left, below */
432:         Ni[0]   = 1.0;
433:         if ((li==0) || (li==nxi-1)) {
434:           if ((lj==0) || (lj==neta-1)) {
435:             MatSetValue(mat,row,cols[0],Ni[0],INSERT_VALUES);
436:             continue;
437:           }
438:         }

440:         /* edges + interior */
441:         /* remainders */
442:         if (i==mx-1) i_c--;
443:         if (j==my-1) j_c--;

445:         col     = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
446:         cols[0] = col_shift + idx_c[col]; /* left, below */
447:         cols[1] = col_shift + idx_c[col+1]; /* right, below */
448:         cols[2] = col_shift + idx_c[col+m_ghost_c]; /* left, above */
449:         cols[3] = col_shift + idx_c[col+(m_ghost_c+1)]; /* right, above */

451:         Ni[0] = 0.25*(1.0-xi[li])*(1.0-eta[lj]);
452:         Ni[1] = 0.25*(1.0+xi[li])*(1.0-eta[lj]);
453:         Ni[2] = 0.25*(1.0-xi[li])*(1.0+eta[lj]);
454:         Ni[3] = 0.25*(1.0+xi[li])*(1.0+eta[lj]);

456:         nc = 0;
457:         if (PetscAbsScalar(Ni[0])<1.0e-32) cols[0]=-1;
458:         if (PetscAbsScalar(Ni[1])<1.0e-32) cols[1]=-1;
459:         if (PetscAbsScalar(Ni[2])<1.0e-32) cols[2]=-1;
460:         if (PetscAbsScalar(Ni[3])<1.0e-32) cols[3]=-1;

462:         MatSetValues(mat,1,&row,4,cols,Ni,INSERT_VALUES);
463:       }
464:     }
465:     PetscFree(xi);
466:     PetscFree(eta);
467:   }
468:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
469:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
470:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
471:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
472:   MatCreateMAIJ(mat,dof,A);
473:   MatDestroy(&mat);
474:   return(0);
475: }

477: /*
478:        Contributed by Andrei Draganescu <aidraga@sandia.gov>
479: */
480: PetscErrorCode DMCreateInterpolation_DA_2D_Q0(DM dac,DM daf,Mat *A)
481: {
482:   PetscErrorCode         ierr;
483:   PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof;
484:   const PetscInt         *idx_c,*idx_f;
485:   ISLocalToGlobalMapping ltog_f,ltog_c;
486:   PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c,*dnz,*onz;
487:   PetscInt               row,col,i_start_ghost,j_start_ghost,cols[4],mx,m_c,my,nc,ratioi,ratioj;
488:   PetscInt               i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col_shift,col_scale;
489:   PetscMPIInt            size_c,size_f,rank_f;
490:   PetscScalar            v[4];
491:   Mat                    mat;
492:   DMBoundaryType         bx,by;

495:   DMDAGetInfo(dac,0,&Mx,&My,0,0,0,0,0,0,&bx,&by,0,0);
496:   DMDAGetInfo(daf,0,&mx,&my,0,0,0,0,&dof,0,0,0,0,0);
497:   ratioi = mx/Mx;
498:   ratioj = my/My;
499:   if (ratioi*Mx != mx) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in x");
500:   if (ratioj*My != my) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in y");
501:   if (ratioi != 2) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in x must be 2");
502:   if (ratioj != 2) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in y must be 2");

504:   DMDAGetCorners(daf,&i_start,&j_start,0,&m_f,&n_f,0);
505:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
506:   DMGetLocalToGlobalMapping(daf,&ltog_f);
507:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

509:   DMDAGetCorners(dac,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
510:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
511:   DMGetLocalToGlobalMapping(dac,&ltog_c);
512:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

514:   /*
 515:      Used for handling a coarse DMDA that lives on 1/4 of the processors of the fine DMDA.
 516:      The coarse vector is then duplicated 4 times (each copy living on 1/4 of the
 517:      processors). Its effective length is hence 4 times its normal length; this is
 518:      why col_scale multiplies the interpolation matrix column sizes.
 519:      col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
 520:      copy of the coarse vector. A bit of a hack, but it is hard to do better.

 522:      In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0
523:   */
524:   MPI_Comm_size(PetscObjectComm((PetscObject)dac),&size_c);
525:   MPI_Comm_size(PetscObjectComm((PetscObject)daf),&size_f);
526:   MPI_Comm_rank(PetscObjectComm((PetscObject)daf),&rank_f);
527:   col_scale = size_f/size_c;
528:   col_shift = Mx*My*(rank_f/size_c);

530:   MatPreallocateInitialize(PetscObjectComm((PetscObject)daf),m_f*n_f,col_scale*m_c*n_c,dnz,onz);
531:   for (j=j_start; j<j_start+n_f; j++) {
532:     for (i=i_start; i<i_start+m_f; i++) {
533:       /* convert to local "natural" numbering and then to PETSc global numbering */
534:       row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

536:       i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
537:       j_c = (j/ratioj);    /* coarse grid node below fine grid node */

539:       if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
540:     j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
541:       if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
542:     i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

544:       /*
545:          Only include those interpolation points that are truly
546:          nonzero. Note this is very important for final grid lines
 547:          in the x and y directions, since they have no right/top neighbors
548:       */
549:       nc = 0;
550:       /* one left and below; or we are right on it */
551:       col        = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
552:       cols[nc++] = col_shift + idx_c[col];
553:       MatPreallocateSet(row,nc,cols,dnz,onz);
554:     }
555:   }
556:   MatCreate(PetscObjectComm((PetscObject)daf),&mat);
557:   MatSetSizes(mat,m_f*n_f,col_scale*m_c*n_c,mx*my,col_scale*Mx*My);
558:   MatSetType(mat,MATAIJ);
559:   MatSeqAIJSetPreallocation(mat,0,dnz);
560:   MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
561:   MatPreallocateFinalize(dnz,onz);

563:   /* loop over local fine grid nodes setting interpolation for those*/
564:   for (j=j_start; j<j_start+n_f; j++) {
565:     for (i=i_start; i<i_start+m_f; i++) {
566:       /* convert to local "natural" numbering and then to PETSc global numbering */
567:       row = idx_f[(m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

569:       i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
570:       j_c = (j/ratioj);    /* coarse grid node below fine grid node */
571:       nc  = 0;
572:       /* one left and below; or we are right on it */
573:       col      = (m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
574:       cols[nc] = col_shift + idx_c[col];
575:       v[nc++]  = 1.0;

577:       MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
578:     }
579:   }
580:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
581:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
582:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
583:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
584:   MatCreateMAIJ(mat,dof,A);
585:   MatDestroy(&mat);
586:   PetscLogFlops(13.0*m_f*n_f);
587:   return(0);
588: }
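
/*
   An illustrative sketch (not part of the original source) of selecting the piecewise constant
   branch above: each fine point simply receives the value of the coarse point to its lower left,
   so every row of the resulting matrix holds a single entry equal to 1.0. The 2x2 -> 4x4 grid
   sizes are assumptions chosen to satisfy the factor-of-2 requirement checked above.
*/
static PetscErrorCode ExampleInterpolation2DQ0(MPI_Comm comm)
{
  DM  dac,daf;
  Mat A;

  DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,2,2,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&dac);
  DMSetUp(dac);
  DMDASetInterpolationType(dac,DMDA_Q0);    /* the coarse DMDA's interpolation type selects the Q0 routines */
  DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&daf);
  DMSetUp(daf);
  DMCreateInterpolation(dac,daf,&A,NULL);   /* reaches DMCreateInterpolation_DA_2D_Q0() */
  MatDestroy(&A);
  DMDestroy(&daf);
  DMDestroy(&dac);
  return(0);
}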

590: /*
591:        Contributed by Jianming Yang <jianming-yang@uiowa.edu>
592: */
593: PetscErrorCode DMCreateInterpolation_DA_3D_Q0(DM dac,DM daf,Mat *A)
594: {
595:   PetscErrorCode         ierr;
596:   PetscInt               i,j,l,i_start,j_start,l_start,m_f,n_f,p_f,Mx,My,Mz,dof;
597:   const PetscInt         *idx_c,*idx_f;
598:   ISLocalToGlobalMapping ltog_f,ltog_c;
599:   PetscInt               m_ghost,n_ghost,p_ghost,m_ghost_c,n_ghost_c,p_ghost_c,nc,*dnz,*onz;
600:   PetscInt               row,col,i_start_ghost,j_start_ghost,l_start_ghost,cols[8],mx,m_c,my,n_c,mz,p_c,ratioi,ratioj,ratiol;
601:   PetscInt               i_c,j_c,l_c,i_start_c,j_start_c,l_start_c,i_start_ghost_c,j_start_ghost_c,l_start_ghost_c,col_shift,col_scale;
602:   PetscMPIInt            size_c,size_f,rank_f;
603:   PetscScalar            v[8];
604:   Mat                    mat;
605:   DMBoundaryType         bx,by,bz;

608:   DMDAGetInfo(dac,0,&Mx,&My,&Mz,0,0,0,0,0,&bx,&by,&bz,0);
609:   DMDAGetInfo(daf,0,&mx,&my,&mz,0,0,0,&dof,0,0,0,0,0);
610:   ratioi = mx/Mx;
611:   ratioj = my/My;
612:   ratiol = mz/Mz;
613:   if (ratioi*Mx != mx) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in x");
614:   if (ratioj*My != my) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in y");
615:   if (ratiol*Mz != mz) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Fine grid points must be multiple of coarse grid points in z");
616:   if (ratioi != 2 && ratioi != 1) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in x must be 1 or 2");
617:   if (ratioj != 2 && ratioj != 1) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in y must be 1 or 2");
618:   if (ratiol != 2 && ratiol != 1) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_WRONG,"Coarsening factor in z must be 1 or 2");

620:   DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);
621:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);
622:   DMGetLocalToGlobalMapping(daf,&ltog_f);
623:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

625:   DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);
626:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);
627:   DMGetLocalToGlobalMapping(dac,&ltog_c);
628:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

630:   /*
 631:      Used for handling a coarse DMDA that lives on 1/4 of the processors of the fine DMDA.
 632:      The coarse vector is then duplicated 4 times (each copy living on 1/4 of the
 633:      processors). Its effective length is hence 4 times its normal length; this is
 634:      why col_scale multiplies the interpolation matrix column sizes.
 635:      col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
 636:      copy of the coarse vector. A bit of a hack, but it is hard to do better.

 638:      In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0
639:   */
640:   MPI_Comm_size(PetscObjectComm((PetscObject)dac),&size_c);
641:   MPI_Comm_size(PetscObjectComm((PetscObject)daf),&size_f);
642:   MPI_Comm_rank(PetscObjectComm((PetscObject)daf),&rank_f);
643:   col_scale = size_f/size_c;
644:   col_shift = Mx*My*Mz*(rank_f/size_c);

646:   MatPreallocateInitialize(PetscObjectComm((PetscObject)daf),m_f*n_f*p_f,col_scale*m_c*n_c*p_c,dnz,onz);
647:   for (l=l_start; l<l_start+p_f; l++) {
648:     for (j=j_start; j<j_start+n_f; j++) {
649:       for (i=i_start; i<i_start+m_f; i++) {
650:         /* convert to local "natural" numbering and then to PETSc global numbering */
651:         row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

653:         i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
654:         j_c = (j/ratioj);    /* coarse grid node below fine grid node */
655:         l_c = (l/ratiol);

657:         if (l_c < l_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
658:     l_start %D l_c %D l_start_ghost_c %D",l_start,l_c,l_start_ghost_c);
659:         if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
660:     j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
661:         if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
662:     i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

664:         /*
665:            Only include those interpolation points that are truly
666:            nonzero. Note this is very important for final grid lines
 667:            in the x, y, and z directions, since they have no right/top/back neighbors
668:         */
669:         nc = 0;
670:         /* one left and below; or we are right on it */
671:         col        = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
672:         cols[nc++] = col_shift + idx_c[col];
673:         MatPreallocateSet(row,nc,cols,dnz,onz);
674:       }
675:     }
676:   }
677:   MatCreate(PetscObjectComm((PetscObject)daf),&mat);
678:   MatSetSizes(mat,m_f*n_f*p_f,col_scale*m_c*n_c*p_c,mx*my*mz,col_scale*Mx*My*Mz);
679:   MatSetType(mat,MATAIJ);
680:   MatSeqAIJSetPreallocation(mat,0,dnz);
681:   MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
682:   MatPreallocateFinalize(dnz,onz);

684:   /* loop over local fine grid nodes setting interpolation for those*/
685:   for (l=l_start; l<l_start+p_f; l++) {
686:     for (j=j_start; j<j_start+n_f; j++) {
687:       for (i=i_start; i<i_start+m_f; i++) {
688:         /* convert to local "natural" numbering and then to PETSc global numbering */
689:         row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

691:         i_c = (i/ratioi);    /* coarse grid node to left of fine grid node */
692:         j_c = (j/ratioj);    /* coarse grid node below fine grid node */
693:         l_c = (l/ratiol);
694:         nc  = 0;
695:         /* one left and below; or we are right on it */
696:         col      = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
697:         cols[nc] = col_shift + idx_c[col];
698:         v[nc++]  = 1.0;

700:         MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
701:       }
702:     }
703:   }
704:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
705:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
706:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
707:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
708:   MatCreateMAIJ(mat,dof,A);
709:   MatDestroy(&mat);
710:   PetscLogFlops(13.0*m_f*n_f*p_f);
711:   return(0);
712: }

714: PetscErrorCode DMCreateInterpolation_DA_3D_Q1(DM dac,DM daf,Mat *A)
715: {
716:   PetscErrorCode         ierr;
717:   PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof,l;
718:   const PetscInt         *idx_c,*idx_f;
719:   ISLocalToGlobalMapping ltog_f,ltog_c;
720:   PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c,Mz,mz;
721:   PetscInt               row,col,i_start_ghost,j_start_ghost,cols[8],mx,m_c,my,nc,ratioi,ratioj,ratiok;
722:   PetscInt               i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c;
723:   PetscInt               l_start,p_f,l_start_ghost,p_ghost,l_start_c,p_c;
724:   PetscInt               l_start_ghost_c,p_ghost_c,l_c,*dnz,*onz;
725:   PetscScalar            v[8],x,y,z;
726:   Mat                    mat;
727:   DMBoundaryType         bx,by,bz;

730:   DMDAGetInfo(dac,0,&Mx,&My,&Mz,0,0,0,0,0,&bx,&by,&bz,0);
731:   DMDAGetInfo(daf,0,&mx,&my,&mz,0,0,0,&dof,0,0,0,0,0);
732:   if (mx == Mx) {
733:     ratioi = 1;
734:   } else if (bx == DM_BOUNDARY_PERIODIC) {
735:     ratioi = mx/Mx;
736:     if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
737:   } else {
738:     ratioi = (mx-1)/(Mx-1);
739:     if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
740:   }
741:   if (my == My) {
742:     ratioj = 1;
743:   } else if (by == DM_BOUNDARY_PERIODIC) {
744:     ratioj = my/My;
745:     if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My  must be integer: my %D My %D",my,My);
746:   } else {
747:     ratioj = (my-1)/(My-1);
748:     if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
749:   }
750:   if (mz == Mz) {
751:     ratiok = 1;
752:   } else if (bz == DM_BOUNDARY_PERIODIC) {
753:     ratiok = mz/Mz;
754:     if (ratiok*Mz != mz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mz/Mz  must be integer: mz %D Mz %D",mz,Mz);
755:   } else {
756:     ratiok = (mz-1)/(Mz-1);
757:     if (ratiok*(Mz-1) != mz-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mz - 1)/(Mz - 1) must be integer: mz %D Mz %D",mz,Mz);
758:   }

760:   DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);
761:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);
762:   DMGetLocalToGlobalMapping(daf,&ltog_f);
763:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

765:   DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);
766:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);
767:   DMGetLocalToGlobalMapping(dac,&ltog_c);
768:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

770:   /* create interpolation matrix, determining exact preallocation */
771:   MatPreallocateInitialize(PetscObjectComm((PetscObject)dac),m_f*n_f*p_f,m_c*n_c*p_c,dnz,onz);
772:   /* loop over local fine grid nodes counting interpolating points */
773:   for (l=l_start; l<l_start+p_f; l++) {
774:     for (j=j_start; j<j_start+n_f; j++) {
775:       for (i=i_start; i<i_start+m_f; i++) {
776:         /* convert to local "natural" numbering and then to PETSc global numbering */
777:         row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];
778:         i_c = (i/ratioi);
779:         j_c = (j/ratioj);
780:         l_c = (l/ratiok);
781:         if (l_c < l_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
782:                                             l_start %D l_c %D l_start_ghost_c %D",l_start,l_c,l_start_ghost_c);
783:         if (j_c < j_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
784:                                             j_start %D j_c %D j_start_ghost_c %D",j_start,j_c,j_start_ghost_c);
785:         if (i_c < i_start_ghost_c) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
786:                                             i_start %D i_c %D i_start_ghost_c %D",i_start,i_c,i_start_ghost_c);

788:         /*
789:          Only include those interpolation points that are truly
790:          nonzero. Note this is very important for final grid lines
 791:          in the x, y, and z directions, since they have no right/top/back neighbors
792:          */
793:         nc         = 0;
794:         col        = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
795:         cols[nc++] = idx_c[col];
796:         if (i_c*ratioi != i) {
797:           cols[nc++] = idx_c[col+1];
798:         }
799:         if (j_c*ratioj != j) {
800:           cols[nc++] = idx_c[col+m_ghost_c];
801:         }
802:         if (l_c*ratiok != l) {
803:           cols[nc++] = idx_c[col+m_ghost_c*n_ghost_c];
804:         }
805:         if (j_c*ratioj != j && i_c*ratioi != i) {
806:           cols[nc++] = idx_c[col+(m_ghost_c+1)];
807:         }
808:         if (j_c*ratioj != j && l_c*ratiok != l) {
809:           cols[nc++] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c)];
810:         }
811:         if (i_c*ratioi != i && l_c*ratiok != l) {
812:           cols[nc++] = idx_c[col+(m_ghost_c*n_ghost_c+1)];
813:         }
814:         if (i_c*ratioi != i && l_c*ratiok != l && j_c*ratioj != j) {
815:           cols[nc++] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c+1)];
816:         }
817:         MatPreallocateSet(row,nc,cols,dnz,onz);
818:       }
819:     }
820:   }
821:   MatCreate(PetscObjectComm((PetscObject)dac),&mat);
822:   MatSetSizes(mat,m_f*n_f*p_f,m_c*n_c*p_c,mx*my*mz,Mx*My*Mz);
823:   MatSetType(mat,MATAIJ);
824:   MatSeqAIJSetPreallocation(mat,0,dnz);
825:   MatMPIAIJSetPreallocation(mat,0,dnz,0,onz);
826:   MatPreallocateFinalize(dnz,onz);

828:   /* loop over local fine grid nodes setting interpolation for those*/
829:   if (!NEWVERSION) {

831:     for (l=l_start; l<l_start+p_f; l++) {
832:       for (j=j_start; j<j_start+n_f; j++) {
833:         for (i=i_start; i<i_start+m_f; i++) {
834:           /* convert to local "natural" numbering and then to PETSc global numbering */
835:           row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

837:           i_c = (i/ratioi);
838:           j_c = (j/ratioj);
839:           l_c = (l/ratiok);

841:           /*
842:            Only include those interpolation points that are truly
843:            nonzero. Note this is very important for final grid lines
 844:            in the x, y, and z directions, since they have no right/top/back neighbors
845:            */
846:           x = ((PetscReal)(i - i_c*ratioi))/((PetscReal)ratioi);
847:           y = ((PetscReal)(j - j_c*ratioj))/((PetscReal)ratioj);
848:           z = ((PetscReal)(l - l_c*ratiok))/((PetscReal)ratiok);
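          /* the weights below are the trilinear shape functions; e.g. .125*(1-(2x-1))*(1-(2y-1))*(1-(2z-1)) = (1-x)*(1-y)*(1-z) */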

850:           nc = 0;
851:           /* one left and below; or we are right on it */
852:           col = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c)+m_ghost_c*(j_c-j_start_ghost_c)+(i_c-i_start_ghost_c));

854:           cols[nc] = idx_c[col];
855:           v[nc++]  = .125*(1. - (2.0*x-1.))*(1. - (2.0*y-1.))*(1. - (2.0*z-1.));

857:           if (i_c*ratioi != i) {
858:             cols[nc] = idx_c[col+1];
859:             v[nc++]  = .125*(1. + (2.0*x-1.))*(1. - (2.0*y-1.))*(1. - (2.0*z-1.));
860:           }

862:           if (j_c*ratioj != j) {
863:             cols[nc] = idx_c[col+m_ghost_c];
864:             v[nc++]  = .125*(1. - (2.0*x-1.))*(1. + (2.0*y-1.))*(1. - (2.0*z-1.));
865:           }

867:           if (l_c*ratiok != l) {
868:             cols[nc] = idx_c[col+m_ghost_c*n_ghost_c];
869:             v[nc++]  = .125*(1. - (2.0*x-1.))*(1. - (2.0*y-1.))*(1. + (2.0*z-1.));
870:           }

872:           if (j_c*ratioj != j && i_c*ratioi != i) {
873:             cols[nc] = idx_c[col+(m_ghost_c+1)];
874:             v[nc++]  = .125*(1. + (2.0*x-1.))*(1. + (2.0*y-1.))*(1. - (2.0*z-1.));
875:           }

877:           if (j_c*ratioj != j && l_c*ratiok != l) {
878:             cols[nc] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c)];
879:             v[nc++]  = .125*(1. - (2.0*x-1.))*(1. + (2.0*y-1.))*(1. + (2.0*z-1.));
880:           }

882:           if (i_c*ratioi != i && l_c*ratiok != l) {
883:             cols[nc] = idx_c[col+(m_ghost_c*n_ghost_c+1)];
884:             v[nc++]  = .125*(1. + (2.0*x-1.))*(1. - (2.0*y-1.))*(1. + (2.0*z-1.));
885:           }

887:           if (i_c*ratioi != i && l_c*ratiok != l && j_c*ratioj != j) {
888:             cols[nc] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c+1)];
889:             v[nc++]  = .125*(1. + (2.0*x-1.))*(1. + (2.0*y-1.))*(1. + (2.0*z-1.));
890:           }
891:           MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
892:         }
893:       }
894:     }

896:   } else {
897:     PetscScalar *xi,*eta,*zeta;
898:     PetscInt    li,nxi,lj,neta,lk,nzeta,n;
899:     PetscScalar Ni[8];

901:     /* compute local coordinate arrays */
902:     nxi   = ratioi + 1;
903:     neta  = ratioj + 1;
904:     nzeta = ratiok + 1;
905:     PetscMalloc1(nxi,&xi);
906:     PetscMalloc1(neta,&eta);
907:     PetscMalloc1(nzeta,&zeta);
908:     for (li=0; li<nxi; li++) xi[li] = -1.0 + (PetscScalar)li*(2.0/(PetscScalar)(nxi-1));
909:     for (lj=0; lj<neta; lj++) eta[lj] = -1.0 + (PetscScalar)lj*(2.0/(PetscScalar)(neta-1));
910:     for (lk=0; lk<nzeta; lk++) zeta[lk] = -1.0 + (PetscScalar)lk*(2.0/(PetscScalar)(nzeta-1));

912:     for (l=l_start; l<l_start+p_f; l++) {
913:       for (j=j_start; j<j_start+n_f; j++) {
914:         for (i=i_start; i<i_start+m_f; i++) {
915:           /* convert to local "natural" numbering and then to PETSc global numbering */
916:           row = idx_f[(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))];

918:           i_c = (i/ratioi);
919:           j_c = (j/ratioj);
920:           l_c = (l/ratiok);

922:           /* remainders */
923:           li = i - ratioi * (i/ratioi);
924:           if (i==mx-1) li = nxi-1;
925:           lj = j - ratioj * (j/ratioj);
926:           if (j==my-1) lj = neta-1;
927:           lk = l - ratiok * (l/ratiok);
928:           if (l==mz-1) lk = nzeta-1;

930:           /* corners */
931:           col     = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c)+m_ghost_c*(j_c-j_start_ghost_c)+(i_c-i_start_ghost_c));
932:           cols[0] = idx_c[col];
933:           Ni[0]   = 1.0;
934:           if ((li==0) || (li==nxi-1)) {
935:             if ((lj==0) || (lj==neta-1)) {
936:               if ((lk==0) || (lk==nzeta-1)) {
937:                 MatSetValue(mat,row,cols[0],Ni[0],INSERT_VALUES);
938:                 continue;
939:               }
940:             }
941:           }

943:           /* edges + interior */
944:           /* remainders */
945:           if (i==mx-1) i_c--;
946:           if (j==my-1) j_c--;
947:           if (l==mz-1) l_c--;

949:           col     = (m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c));
950:           cols[0] = idx_c[col]; /* one left and below; or we are right on it */
951:           cols[1] = idx_c[col+1]; /* one right and below */
952:           cols[2] = idx_c[col+m_ghost_c];  /* one left and above */
953:           cols[3] = idx_c[col+(m_ghost_c+1)]; /* one right and above */

955:           cols[4] = idx_c[col+m_ghost_c*n_ghost_c]; /* one left and below and front; or we are right on it */
956:           cols[5] = idx_c[col+(m_ghost_c*n_ghost_c+1)]; /* one right and below, and front */
957:           cols[6] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c)]; /* one left and above and front*/
958:           cols[7] = idx_c[col+(m_ghost_c*n_ghost_c+m_ghost_c+1)]; /* one right and above and front */

960:           Ni[0] = 0.125*(1.0-xi[li])*(1.0-eta[lj])*(1.0-zeta[lk]);
961:           Ni[1] = 0.125*(1.0+xi[li])*(1.0-eta[lj])*(1.0-zeta[lk]);
962:           Ni[2] = 0.125*(1.0-xi[li])*(1.0+eta[lj])*(1.0-zeta[lk]);
963:           Ni[3] = 0.125*(1.0+xi[li])*(1.0+eta[lj])*(1.0-zeta[lk]);

965:           Ni[4] = 0.125*(1.0-xi[li])*(1.0-eta[lj])*(1.0+zeta[lk]);
966:           Ni[5] = 0.125*(1.0+xi[li])*(1.0-eta[lj])*(1.0+zeta[lk]);
967:           Ni[6] = 0.125*(1.0-xi[li])*(1.0+eta[lj])*(1.0+zeta[lk]);
968:           Ni[7] = 0.125*(1.0+xi[li])*(1.0+eta[lj])*(1.0+zeta[lk]);

970:           for (n=0; n<8; n++) {
971:             if (PetscAbsScalar(Ni[n])<1.0e-32) cols[n]=-1;
972:           }
973:           MatSetValues(mat,1,&row,8,cols,Ni,INSERT_VALUES);

975:         }
976:       }
977:     }
978:     PetscFree(xi);
979:     PetscFree(eta);
980:     PetscFree(zeta);
981:   }
982:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
983:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);
984:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
985:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);

987:   MatCreateMAIJ(mat,dof,A);
988:   MatDestroy(&mat);
989:   return(0);
990: }

992: PetscErrorCode  DMCreateInterpolation_DA(DM dac,DM daf,Mat *A,Vec *scale)
993: {
994:   PetscErrorCode   ierr;
995:   PetscInt         dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc,dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
996:   DMBoundaryType   bxc,byc,bzc,bxf,byf,bzf;
997:   DMDAStencilType  stc,stf;
998:   DM_DA            *ddc = (DM_DA*)dac->data;


1006:   DMDAGetInfo(dac,&dimc,&Mc,&Nc,&Pc,&mc,&nc,&pc,&dofc,&sc,&bxc,&byc,&bzc,&stc);
1007:   DMDAGetInfo(daf,&dimf,&Mf,&Nf,&Pf,&mf,&nf,&pf,&doff,&sf,&bxf,&byf,&bzf,&stf);
1008:   if (dimc != dimf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Dimensions of DMDA do not match %D %D",dimc,dimf);
1009:   if (dofc != doff) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"DOF of DMDA do not match %D %D",dofc,doff);
1010:   if (sc != sf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil width of DMDA do not match %D %D",sc,sf);
1011:   if (bxc != bxf || byc != byf || bzc != bzf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Boundary type different in two DMDAs");
1012:   if (stc != stf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil type different in two DMDAs");
1013:   if (Mc < 2 && Mf > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in x direction");
1014:   if (dimc > 1 && Nc < 2 && Nf > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in y direction");
1015:   if (dimc > 2 && Pc < 2 && Pf > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in z direction");

1017:   if (ddc->interptype == DMDA_Q1) {
1018:     if (dimc == 1) {
1019:       DMCreateInterpolation_DA_1D_Q1(dac,daf,A);
1020:     } else if (dimc == 2) {
1021:       DMCreateInterpolation_DA_2D_Q1(dac,daf,A);
1022:     } else if (dimc == 3) {
1023:       DMCreateInterpolation_DA_3D_Q1(dac,daf,A);
1024:     } else SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_SUP,"No support for this DMDA dimension %D for interpolation type %d",dimc,(int)ddc->interptype);
1025:   } else if (ddc->interptype == DMDA_Q0) {
1026:     if (dimc == 1) {
1027:       DMCreateInterpolation_DA_1D_Q0(dac,daf,A);
1028:     } else if (dimc == 2) {
1029:       DMCreateInterpolation_DA_2D_Q0(dac,daf,A);
1030:     } else if (dimc == 3) {
1031:       DMCreateInterpolation_DA_3D_Q0(dac,daf,A);
1032:     } else SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_SUP,"No support for this DMDA dimension %D for interpolation type %d",dimc,(int)ddc->interptype);
1033:   }
1034:   if (scale) {
1035:     DMCreateInterpolationScale((DM)dac,(DM)daf,*A,scale);
1036:   }
1037:   return(0);
1038: }
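
/*
   An illustrative calling sequence (not part of the original source) that reaches
   DMCreateInterpolation_DA() above. Using DMRefine() to build the fine DMDA guarantees the
   dimension, dof, stencil, boundary-type and ratio checks above are satisfied; the default
   DMDA refinement factor is 2. dac is assumed to be a fully set up coarse DMDA.
*/
static PetscErrorCode ExampleRefineAndInterpolate(DM dac)
{
  DM  daf;
  Mat A;
  Vec scale;

  DMRefine(dac,PetscObjectComm((PetscObject)dac),&daf);
  DMCreateInterpolation(dac,daf,&A,&scale);   /* A interpolates coarse -> fine, scale = 1/(R*1) */
  /* ... use A with MatInterpolate()/MatRestrict(), e.g. inside a multigrid cycle ... */
  VecDestroy(&scale);
  MatDestroy(&A);
  DMDestroy(&daf);
  return(0);
}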

1040: PetscErrorCode DMCreateInjection_DA_1D(DM dac,DM daf,VecScatter *inject)
1041: {
1042:   PetscErrorCode         ierr;
1043:   PetscInt               i,i_start,m_f,Mx,dof;
1044:   const PetscInt         *idx_f;
1045:   ISLocalToGlobalMapping ltog_f;
1046:   PetscInt               m_ghost,m_ghost_c;
1047:   PetscInt               row,i_start_ghost,mx,m_c,nc,ratioi;
1048:   PetscInt               i_start_c,i_start_ghost_c;
1049:   PetscInt               *cols;
1050:   DMBoundaryType         bx;
1051:   Vec                    vecf,vecc;
1052:   IS                     isf;

1055:   DMDAGetInfo(dac,0,&Mx,0,0,0,0,0,0,0,&bx,0,0,0);
1056:   DMDAGetInfo(daf,0,&mx,0,0,0,0,0,&dof,0,0,0,0,0);
1057:   if (bx == DM_BOUNDARY_PERIODIC) {
1058:     ratioi = mx/Mx;
1059:     if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
1060:   } else {
1061:     ratioi = (mx-1)/(Mx-1);
1062:     if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
1063:   }

1065:   DMDAGetCorners(daf,&i_start,0,0,&m_f,0,0);
1066:   DMDAGetGhostCorners(daf,&i_start_ghost,0,0,&m_ghost,0,0);
1067:   DMGetLocalToGlobalMapping(daf,&ltog_f);
1068:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

1070:   DMDAGetCorners(dac,&i_start_c,0,0,&m_c,0,0);
1071:   DMDAGetGhostCorners(dac,&i_start_ghost_c,0,0,&m_ghost_c,0,0);


 1074:   /* loop over local coarse grid nodes, recording the fine grid node each one injects from */
1075:   nc   = 0;
1076:   PetscMalloc1(m_f,&cols);


1079:   for (i=i_start_c; i<i_start_c+m_c; i++) {
1080:     PetscInt i_f = i*ratioi;

1082:     if (i_f < i_start_ghost || i_f >= i_start_ghost+m_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\ni_c %D i_f %D fine ghost range [%D,%D]",i,i_f,i_start_ghost,i_start_ghost+m_ghost);

1084:     row        = idx_f[(i_f-i_start_ghost)];
1085:     cols[nc++] = row;
1086:   }

1088:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
1089:   ISCreateBlock(PetscObjectComm((PetscObject)daf),dof,nc,cols,PETSC_OWN_POINTER,&isf);
1090:   DMGetGlobalVector(dac,&vecc);
1091:   DMGetGlobalVector(daf,&vecf);
1092:   VecScatterCreate(vecf,isf,vecc,NULL,inject);
1093:   DMRestoreGlobalVector(dac,&vecc);
1094:   DMRestoreGlobalVector(daf,&vecf);
1095:   ISDestroy(&isf);
1096:   return(0);
1097: }

1099: PetscErrorCode DMCreateInjection_DA_2D(DM dac,DM daf,VecScatter *inject)
1100: {
1101:   PetscErrorCode         ierr;
1102:   PetscInt               i,j,i_start,j_start,m_f,n_f,Mx,My,dof;
1103:   const PetscInt         *idx_c,*idx_f;
1104:   ISLocalToGlobalMapping ltog_f,ltog_c;
1105:   PetscInt               m_ghost,n_ghost,m_ghost_c,n_ghost_c;
1106:   PetscInt               row,i_start_ghost,j_start_ghost,mx,m_c,my,nc,ratioi,ratioj;
1107:   PetscInt               i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c;
1108:   PetscInt               *cols;
1109:   DMBoundaryType         bx,by;
1110:   Vec                    vecf,vecc;
1111:   IS                     isf;

1114:   DMDAGetInfo(dac,0,&Mx,&My,0,0,0,0,0,0,&bx,&by,0,0);
1115:   DMDAGetInfo(daf,0,&mx,&my,0,0,0,0,&dof,0,0,0,0,0);
1116:   if (bx == DM_BOUNDARY_PERIODIC) {
1117:     ratioi = mx/Mx;
1118:     if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
1119:   } else {
1120:     ratioi = (mx-1)/(Mx-1);
1121:     if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
1122:   }
1123:   if (by == DM_BOUNDARY_PERIODIC) {
1124:     ratioj = my/My;
1125:     if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My  must be integer: my %D My %D",my,My);
1126:   } else {
1127:     ratioj = (my-1)/(My-1);
1128:     if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
1129:   }

1131:   DMDAGetCorners(daf,&i_start,&j_start,0,&m_f,&n_f,0);
1132:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
1133:   DMGetLocalToGlobalMapping(daf,&ltog_f);
1134:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

1136:   DMDAGetCorners(dac,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
1137:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
1138:   DMGetLocalToGlobalMapping(dac,&ltog_c);
1139:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);

 1141:   /* loop over local coarse grid nodes, recording the fine grid node each one injects from */
1142:   nc   = 0;
1143:   PetscMalloc1(n_f*m_f,&cols);
1144:   for (j=j_start_c; j<j_start_c+n_c; j++) {
1145:     for (i=i_start_c; i<i_start_c+m_c; i++) {
1146:       PetscInt i_f = i*ratioi,j_f = j*ratioj;
1147:       if (j_f < j_start_ghost || j_f >= j_start_ghost+n_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
1148:     j_c %D j_f %D fine ghost range [%D,%D]",j,j_f,j_start_ghost,j_start_ghost+n_ghost);
1149:       if (i_f < i_start_ghost || i_f >= i_start_ghost+m_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA\n\
1150:     i_c %D i_f %D fine ghost range [%D,%D]",i,i_f,i_start_ghost,i_start_ghost+m_ghost);
1151:       row        = idx_f[(m_ghost*(j_f-j_start_ghost) + (i_f-i_start_ghost))];
1152:       cols[nc++] = row;
1153:     }
1154:   }
1155:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
1156:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);

1158:   ISCreateBlock(PetscObjectComm((PetscObject)daf),dof,nc,cols,PETSC_OWN_POINTER,&isf);
1159:   DMGetGlobalVector(dac,&vecc);
1160:   DMGetGlobalVector(daf,&vecf);
1161:   VecScatterCreate(vecf,isf,vecc,NULL,inject);
1162:   DMRestoreGlobalVector(dac,&vecc);
1163:   DMRestoreGlobalVector(daf,&vecf);
1164:   ISDestroy(&isf);
1165:   return(0);
1166: }

1168: PetscErrorCode DMCreateInjection_DA_3D(DM dac,DM daf,VecScatter *inject)
1169: {
1170:   PetscErrorCode         ierr;
1171:   PetscInt               i,j,k,i_start,j_start,k_start,m_f,n_f,p_f,Mx,My,Mz;
1172:   PetscInt               m_ghost,n_ghost,p_ghost,m_ghost_c,n_ghost_c,p_ghost_c;
1173:   PetscInt               i_start_ghost,j_start_ghost,k_start_ghost;
1174:   PetscInt               mx,my,mz,ratioi,ratioj,ratiok;
1175:   PetscInt               i_start_c,j_start_c,k_start_c;
1176:   PetscInt               m_c,n_c,p_c;
1177:   PetscInt               i_start_ghost_c,j_start_ghost_c,k_start_ghost_c;
1178:   PetscInt               row,nc,dof;
1179:   const PetscInt         *idx_c,*idx_f;
1180:   ISLocalToGlobalMapping ltog_f,ltog_c;
1181:   PetscInt               *cols;
1182:   DMBoundaryType         bx,by,bz;
1183:   Vec                    vecf,vecc;
1184:   IS                     isf;

1187:   DMDAGetInfo(dac,0,&Mx,&My,&Mz,0,0,0,0,0,&bx,&by,&bz,0);
1188:   DMDAGetInfo(daf,0,&mx,&my,&mz,0,0,0,&dof,0,0,0,0,0);

1190:   if (bx == DM_BOUNDARY_PERIODIC) {
1191:     ratioi = mx/Mx;
1192:     if (ratioi*Mx != mx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mx/Mx  must be integer: mx %D Mx %D",mx,Mx);
1193:   } else {
1194:     ratioi = (mx-1)/(Mx-1);
1195:     if (ratioi*(Mx-1) != mx-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %D Mx %D",mx,Mx);
1196:   }
1197:   if (by == DM_BOUNDARY_PERIODIC) {
1198:     ratioj = my/My;
1199:     if (ratioj*My != my) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: my/My  must be integer: my %D My %D",my,My);
1200:   } else {
1201:     ratioj = (my-1)/(My-1);
1202:     if (ratioj*(My-1) != my-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (my - 1)/(My - 1) must be integer: my %D My %D",my,My);
1203:   }
1204:   if (bz == DM_BOUNDARY_PERIODIC) {
1205:     ratiok = mz/Mz;
1206:     if (ratiok*Mz != mz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: mz/Mz  must be integer: mz %D Mz %D",mz,Mz);
1207:   } else {
1208:     ratiok = (mz-1)/(Mz-1);
1209:     if (ratiok*(Mz-1) != mz-1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Ratio between levels: (mz - 1)/(Mz - 1) must be integer: mz %D Mz %D",mz,Mz);
1210:   }
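  /*
     Worked example of the ratio checks above (sizes are illustrative): non-periodic mx = 17 over
     Mx = 9 gives ratioi = (17-1)/(9-1) = 2 and 2*(9-1) == 17-1, so it is accepted; periodic
     mx = 16 over Mx = 8 gives ratioi = 16/8 = 2 and 2*8 == 16. A non-periodic pairing such as
     mx = 16, Mx = 9 fails, since (16-1)/(9-1) truncates to 1 and 1*(9-1) != 16-1.
  */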

1212:   DMDAGetCorners(daf,&i_start,&j_start,&k_start,&m_f,&n_f,&p_f);
1213:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&k_start_ghost,&m_ghost,&n_ghost,&p_ghost);
1214:   DMGetLocalToGlobalMapping(daf,&ltog_f);
1215:   ISLocalToGlobalMappingGetBlockIndices(ltog_f,&idx_f);

1217:   DMDAGetCorners(dac,&i_start_c,&j_start_c,&k_start_c,&m_c,&n_c,&p_c);
1218:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&k_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);
1219:   DMGetLocalToGlobalMapping(dac,&ltog_c);
1220:   ISLocalToGlobalMappingGetBlockIndices(ltog_c,&idx_c);


1223:   /* loop over local coarse grid nodes, recording the fine grid node that lies over each one */
1224:   nc   = 0;
1225:   PetscMalloc1(n_f*m_f*p_f,&cols);
1226:   for (k=k_start_c; k<k_start_c+p_c; k++) {
1227:     for (j=j_start_c; j<j_start_c+n_c; j++) {
1228:       for (i=i_start_c; i<i_start_c+m_c; i++) {
1229:         PetscInt i_f = i*ratioi,j_f = j*ratioj,k_f = k*ratiok;
1230:         if (k_f < k_start_ghost || k_f >= k_start_ghost+p_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA  "
1231:                                                                           "k_c %D k_f %D fine ghost range [%D,%D]",k,k_f,k_start_ghost,k_start_ghost+p_ghost);
1232:         if (j_f < j_start_ghost || j_f >= j_start_ghost+n_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA  "
1233:                                                                           "j_c %D j_f %D fine ghost range [%D,%D]",j,j_f,j_start_ghost,j_start_ghost+n_ghost);
1234:         if (i_f < i_start_ghost || i_f >= i_start_ghost+m_ghost) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Processor's coarse DMDA must lie over fine DMDA  "
1235:                                                                           "i_c %D i_f %D fine ghost range [%D,%D]",i,i_f,i_start_ghost,i_start_ghost+m_ghost);
1236:         row        = idx_f[(m_ghost*n_ghost*(k_f-k_start_ghost) + m_ghost*(j_f-j_start_ghost) + (i_f-i_start_ghost))];
1237:         cols[nc++] = row;
1238:       }
1239:     }
1240:   }
1241:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_f,&idx_f);
1242:   ISLocalToGlobalMappingRestoreBlockIndices(ltog_c,&idx_c);

1244:   ISCreateBlock(PetscObjectComm((PetscObject)daf),dof,nc,cols,PETSC_OWN_POINTER,&isf);
1245:   DMGetGlobalVector(dac,&vecc);
1246:   DMGetGlobalVector(daf,&vecf);
1247:   VecScatterCreate(vecf,isf,vecc,NULL,inject);
1248:   DMRestoreGlobalVector(dac,&vecc);
1249:   DMRestoreGlobalVector(daf,&vecf);
1250:   ISDestroy(&isf);
1251:   return(0);
1252: }

1254: PetscErrorCode  DMCreateInjection_DA(DM dac,DM daf,Mat *mat)
1255: {
1256:   PetscErrorCode  ierr;
1257:   PetscInt        dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc,dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
1258:   DMBoundaryType  bxc,byc,bzc,bxf,byf,bzf;
1259:   DMDAStencilType stc,stf;
1260:   VecScatter      inject = NULL;


1267:   DMDAGetInfo(dac,&dimc,&Mc,&Nc,&Pc,&mc,&nc,&pc,&dofc,&sc,&bxc,&byc,&bzc,&stc);
1268:   DMDAGetInfo(daf,&dimf,&Mf,&Nf,&Pf,&mf,&nf,&pf,&doff,&sf,&bxf,&byf,&bzf,&stf);
1269:   if (dimc != dimf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Dimensions of DMDA do not match %D %D",dimc,dimf);
1270:   if (dofc != doff) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"DOF of DMDA do not match %D %D",dofc,doff);
1271:   if (sc != sf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil widths of DMDA do not match %D %D",sc,sf);
1272:   if (bxc != bxf || byc != byf || bzc != bzf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Boundary type different in two DMDAs");
1273:   if (stc != stf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil type different in two DMDAs");
1274:   if (Mc < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in x direction");
1275:   if (dimc > 1 && Nc < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in y direction");
1276:   if (dimc > 2 && Pc < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Coarse grid requires at least 2 points in z direction");

1278:   if (dimc == 1) {
1279:     DMCreateInjection_DA_1D(dac,daf,&inject);
1280:   } else if (dimc == 2) {
1281:     DMCreateInjection_DA_2D(dac,daf,&inject);
1282:   } else if (dimc == 3) {
1283:     DMCreateInjection_DA_3D(dac,daf,&inject);
1284:   }
1285:   MatCreateScatter(PetscObjectComm((PetscObject)inject), inject, mat);
1286:   VecScatterDestroy(&inject);
1287:   return(0);
1288: }
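/*
   A minimal usage sketch (illustrative only, not part of this file): it assumes the public
   DMCreateInjection() dispatches to DMCreateInjection_DA() for DMDAs and that the resulting
   scatter-backed Mat is applied with MatRestrict(); grid sizes and options are examples only.
*/
#include <petscdmda.h>

int main(int argc,char **argv)
{
  DM             dac,daf;
  Mat            inject;
  Vec            xf,xc;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* coarse 9x9 and fine 17x17 grids, 1 dof, stencil width 1, so (mx-1)/(Mx-1) = 2 in each direction */
  ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,9,9,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&dac);CHKERRQ(ierr);
  ierr = DMSetUp(dac);CHKERRQ(ierr);
  ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&daf);CHKERRQ(ierr);
  ierr = DMSetUp(daf);CHKERRQ(ierr);

  ierr = DMCreateInjection(dac,daf,&inject);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(daf,&xf);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(dac,&xc);CHKERRQ(ierr);
  ierr = VecSet(xf,1.0);CHKERRQ(ierr);
  ierr = MatRestrict(inject,xf,xc);CHKERRQ(ierr);   /* xc picks up the fine-grid values at the coarse points */

  ierr = VecDestroy(&xf);CHKERRQ(ierr);
  ierr = VecDestroy(&xc);CHKERRQ(ierr);
  ierr = MatDestroy(&inject);CHKERRQ(ierr);
  ierr = DMDestroy(&dac);CHKERRQ(ierr);
  ierr = DMDestroy(&daf);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}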

1290: PetscErrorCode  DMCreateAggregates_DA(DM dac,DM daf,Mat *rest)
1291: {
1292:   PetscErrorCode         ierr;
1293:   PetscInt               dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc;
1294:   PetscInt               dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
1295:   DMBoundaryType         bxc,byc,bzc,bxf,byf,bzf;
1296:   DMDAStencilType        stc,stf;
1297:   PetscInt               i,j,l;
1298:   PetscInt               i_start,j_start,l_start, m_f,n_f,p_f;
1299:   PetscInt               i_start_ghost,j_start_ghost,l_start_ghost,m_ghost,n_ghost,p_ghost;
1300:   const PetscInt         *idx_f;
1301:   PetscInt               i_c,j_c,l_c;
1302:   PetscInt               i_start_c,j_start_c,l_start_c, m_c,n_c,p_c;
1303:   PetscInt               i_start_ghost_c,j_start_ghost_c,l_start_ghost_c,m_ghost_c,n_ghost_c,p_ghost_c;
1304:   const PetscInt         *idx_c;
1305:   PetscInt               d;
1306:   PetscInt               a;
1307:   PetscInt               max_agg_size;
1308:   PetscInt               *fine_nodes;
1309:   PetscScalar            *one_vec;
1310:   PetscInt               fn_idx;
1311:   ISLocalToGlobalMapping ltogmf,ltogmc;


1318:   DMDAGetInfo(dac,&dimc,&Mc,&Nc,&Pc,&mc,&nc,&pc,&dofc,&sc,&bxc,&byc,&bzc,&stc);
1319:   DMDAGetInfo(daf,&dimf,&Mf,&Nf,&Pf,&mf,&nf,&pf,&doff,&sf,&bxf,&byf,&bzf,&stf);
1320:   if (dimc != dimf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Dimensions of DMDA do not match %D %D",dimc,dimf);
1321:   if (dofc != doff) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"DOF of DMDA do not match %D %D",dofc,doff);
1322:   if (sc != sf) SETERRQ2(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil widths of DMDA do not match %D %D",sc,sf);
1323:   if (bxc != bxf || byc != byf || bzc != bzf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Boundary type different in two DMDAs");
1324:   if (stc != stf) SETERRQ(PetscObjectComm((PetscObject)daf),PETSC_ERR_ARG_INCOMP,"Stencil type different in two DMDAs");

1326:   if (Mf < Mc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Coarse grid has more points than fine grid, Mc %D, Mf %D", Mc, Mf);
1327:   if (Nf < Nc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Coarse grid has more points than fine grid, Nc %D, Nf %D", Nc, Nf);
1328:   if (Pf < Pc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Coarse grid has more points than fine grid, Pc %D, Pf %D", Pc, Pf);

1330:   if (Pc < 0) Pc = 1;
1331:   if (Pf < 0) Pf = 1;
1332:   if (Nc < 0) Nc = 1;
1333:   if (Nf < 0) Nf = 1;

1335:   DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);
1336:   DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);

1338:   DMGetLocalToGlobalMapping(daf,&ltogmf);
1339:   ISLocalToGlobalMappingGetIndices(ltogmf,&idx_f);

1341:   DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);
1342:   DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);

1344:   DMGetLocalToGlobalMapping(dac,&ltogmc);
1345:   ISLocalToGlobalMappingGetIndices(ltogmc,&idx_c);

1347:   /*
1348:      The basic idea is as follows. Here is a 2D example; suppose r_x, r_y are the ratios
1349:      for dimensions 1 and 2, respectively.
1350:      Let (i,j) be a coarse grid node. All the fine grid nodes between r_x*i and r_x*(i+1)
1351:      and between r_y*j and r_y*(j+1) are grouped into the same coarse grid aggregate.
1352:      Each specific dof on the fine grid is mapped to one dof on the coarse grid.
1353:   */
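  /*
     For example (sizes are illustrative): with Mf = 9 and Mc = 3, coarse node i_c = 1 collects the
     fine nodes i = 3,4,5 in the x direction, since the loops below run i from i_c*Mf/Mc = 3 up to,
     but not including, (i_c+1)*Mf/Mc = 6.
  */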

1355:   max_agg_size = (Mf/Mc+1)*(Nf/Nc+1)*(Pf/Pc+1);

1357:   /* create the matrix that will contain the restriction operator */
1358:   MatCreateAIJ(PetscObjectComm((PetscObject)daf), m_c*n_c*p_c*dofc, m_f*n_f*p_f*doff, Mc*Nc*Pc*dofc, Mf*Nf*Pf*doff,
1359:                       max_agg_size, NULL, max_agg_size, NULL, rest);

1361:   /* store nodes in the fine grid here */
1362:   PetscMalloc2(max_agg_size, &one_vec,max_agg_size, &fine_nodes);
1363:   for (i=0; i<max_agg_size; i++) one_vec[i] = 1.0;

1365:   /* loop over all coarse nodes */
1366:   for (l_c=l_start_c; l_c<l_start_c+p_c; l_c++) {
1367:     for (j_c=j_start_c; j_c<j_start_c+n_c; j_c++) {
1368:       for (i_c=i_start_c; i_c<i_start_c+m_c; i_c++) {
1369:         for (d=0; d<dofc; d++) {
1370:           /* convert to local "natural" numbering and then to PETSc global numbering */
1371:           a = idx_c[dofc*(m_ghost_c*n_ghost_c*(l_c-l_start_ghost_c) + m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c))] + d;

1373:           fn_idx = 0;
1374:           /* Corresponding fine points are all points (i_f, j_f, l_f) such that
1375:              i_c*Mf/Mc <= i_f < (i_c+1)*Mf/Mc
1376:              (same for other dimensions)
1377:           */
1378:           for (l=l_c*Pf/Pc; l<PetscMin((l_c+1)*Pf/Pc,Pf); l++) {
1379:             for (j=j_c*Nf/Nc; j<PetscMin((j_c+1)*Nf/Nc,Nf); j++) {
1380:               for (i=i_c*Mf/Mc; i<PetscMin((i_c+1)*Mf/Mc,Mf); i++) {
1381:                 fine_nodes[fn_idx] = idx_f[doff*(m_ghost*n_ghost*(l-l_start_ghost) + m_ghost*(j-j_start_ghost) + (i-i_start_ghost))] + d;
1382:                 fn_idx++;
1383:               }
1384:             }
1385:           }
1386:           /* add all these points to one aggregate */
1387:           MatSetValues(*rest, 1, &a, fn_idx, fine_nodes, one_vec, INSERT_VALUES);
1388:         }
1389:       }
1390:     }
1391:   }
1392:   ISLocalToGlobalMappingRestoreIndices(ltogmf,&idx_f);
1393:   ISLocalToGlobalMappingRestoreIndices(ltogmc,&idx_c);
1394:   PetscFree2(one_vec,fine_nodes);
1395:   MatAssemblyBegin(*rest, MAT_FINAL_ASSEMBLY);
1396:   MatAssemblyEnd(*rest, MAT_FINAL_ASSEMBLY);
1397:   return(0);
1398: }
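/*
   A minimal sketch of using the aggregation operator built above, assuming the public
   DMCreateAggregates() routes to DMCreateAggregates_DA() for DMDAs; dac and daf are taken to be
   compatible coarse/fine DMDAs created as in the earlier injection sketch.
*/
  {
    Mat            rest;
    Vec            onesf,aggsize;
    PetscErrorCode ierr;

    ierr = DMCreateAggregates(dac,daf,&rest);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(daf,&onesf);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(dac,&aggsize);CHKERRQ(ierr);
    ierr = VecSet(onesf,1.0);CHKERRQ(ierr);
    /* every stored entry of rest is 1.0, so applying it to a vector of ones
       yields, for each coarse dof, the number of fine dofs in its aggregate */
    ierr = MatMult(rest,onesf,aggsize);CHKERRQ(ierr);
    ierr = VecDestroy(&onesf);CHKERRQ(ierr);
    ierr = VecDestroy(&aggsize);CHKERRQ(ierr);
    ierr = MatDestroy(&rest);CHKERRQ(ierr);
  }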