Actual source code: telescope_dmda.c

petsc-3.7.3 2016-08-01

  3: #include <petsc/private/matimpl.h>
  4: #include <petsc/private/pcimpl.h>
  5: #include <petsc/private/dmimpl.h>
  6: #include <petscksp.h>           /*I "petscksp.h" I*/
  7: #include <petscdm.h>
  8: #include <petscdmda.h>

 10:  #include <../src/ksp/pc/impls/telescope/telescope.h>

 14: PetscErrorCode _DMDADetermineRankFromGlobalIJK(PetscInt dim,PetscInt i,PetscInt j,PetscInt k,
 15:                                                PetscInt Mp,PetscInt Np,PetscInt Pp,
 16:                                                PetscInt start_i[],PetscInt start_j[],PetscInt start_k[],
 17:                                                PetscInt span_i[],PetscInt span_j[],PetscInt span_k[],
 18:                                                PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *_pk,PetscMPIInt *rank_re)
 19: {
 20:   PetscInt pi,pj,pk,n;

 23:   pi = pj = pk = -1;
 24:   if (_pi) {
 25:     for (n=0; n<Mp; n++) {
 26:       if ( (i >= start_i[n]) && (i < start_i[n]+span_i[n]) ) {
 27:         pi = n;
 28:         break;
 29:       }
 30:     }
 31:     if (pi == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pi cannot be determined : range %D, val %D",Mp,i);
 32:     *_pi = pi;
 33:   }

 35:   if (_pj) {
 36:     for (n=0; n<Np; n++) {
 37:       if ( (j >= start_j[n]) && (j < start_j[n]+span_j[n]) ) {
 38:         pj = n;
 39:         break;
 40:       }
 41:     }
 42:     if (pj == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pj cannot be determined : range %D, val %D",Np,j);
 43:     *_pj = pj;
 44:   }

 46:   if (_pk) {
 47:     for (n=0; n<Pp; n++) {
 48:       if ( (k >= start_k[n]) && (k < start_k[n]+span_k[n]) ) {
 49:         pk = n;
 50:         break;
 51:       }
 52:     }
 53:     if (pk == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pk cannot be determined : range %D, val %D",Pp,k);
 54:     *_pk = pk;
 55:   }

 57:   switch (dim) {
 58:     case 1:
 59:       *rank_re = pi;
 60:       break;
 61:     case 2:
 62:       *rank_re = pi + pj * Mp;
 63:       break;
 64:     case 3:
 65:       *rank_re = pi + pj * Mp + pk * (Mp*Np);
 66:       break;
 67:   }
 68:   return(0);
 69: }
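
The helper above first locates the owning process coordinate in each direction by scanning the ownership ranges, then flattens (pi,pj,pk) into a single rank using the DMDA lexicographic ordering (i varies fastest, then j, then k). A minimal standalone sketch of that flattening — the 2 x 3 x 2 process grid and the values of pi, pj, pk are made up for illustration, not taken from the source above:

#include <stdio.h>

/* Toy illustration only: flatten a process-grid coordinate (pi,pj,pk) into the
   lexicographic rank used above, with i varying fastest. */
int main(void)
{
  int Mp = 2, Np = 3, Pp = 2;            /* hypothetical 2 x 3 x 2 process grid */
  int pi = 1, pj = 2, pk = 1;            /* coordinate of one sub-domain        */
  int rank = pi + pj*Mp + pk*(Mp*Np);
  printf("(pi,pj,pk) = (%d,%d,%d) -> rank %d of %d\n",pi,pj,pk,rank,Mp*Np*Pp);
  return 0;
}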

 73: PetscErrorCode _DMDADetermineGlobalS0(PetscInt dim,PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,PetscInt Pp_re,
 74:                                       PetscInt range_i_re[],PetscInt range_j_re[],PetscInt range_k_re[],PetscInt *s0)
 75: {
 76:   PetscInt i,j,k,start_IJK = 0;
 77:   PetscInt rank_ijk;

 80:   switch (dim) {
 81:     case 1:
 82:       for (i=0; i<Mp_re; i++) {
 83:         rank_ijk = i;
 84:         if (rank_ijk < rank_re) {
 85:           start_IJK += range_i_re[i];
 86:         }
 87:       }
 88:       break;
 89:     case 2:
 90:       for (j=0; j<Np_re; j++) {
 91:         for (i=0; i<Mp_re; i++) {
 92:           rank_ijk = i + j*Mp_re;
 93:           if (rank_ijk < rank_re) {
 94:             start_IJK += range_i_re[i]*range_j_re[j];
 95:           }
 96:         }
 97:       }
 98:       break;
 99:     case 3:
100:       for (k=0; k<Pp_re; k++) {
101:         for (j=0; j<Np_re; j++) {
102:           for (i=0; i<Mp_re; i++) {
103:             rank_ijk = i + j*Mp_re + k*Mp_re*Np_re;
104:             if (rank_ijk < rank_re) {
105:               start_IJK += range_i_re[i]*range_j_re[j]*range_k_re[k];
106:             }
107:           }
108:         }
109:       }
110:       break;
111:   }
112:   *s0 = start_IJK;
113:   return(0);
114: }
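
_DMDADetermineGlobalS0() returns the first global (natural) index owned by rank_re, obtained by summing the point counts of every rank that precedes it in the lexicographic ordering. A hand-worked sketch, assuming a hypothetical 2 x 2 process grid with ownership ranges {3,2} in i and {4,3} in j:

#include <stdio.h>

/* Toy illustration only: the starting index of rank_re equals the total number
   of points owned by all lower-numbered ranks. */
int main(void)
{
  int Mp_re = 2, Np_re = 2;
  int range_i_re[2] = {3,2}, range_j_re[2] = {4,3};
  int rank_re = 3;                        /* (pi,pj) = (1,1) */
  int i,j,s0 = 0;
  for (j=0; j<Np_re; j++) {
    for (i=0; i<Mp_re; i++) {
      if (i + j*Mp_re < rank_re) s0 += range_i_re[i]*range_j_re[j];
    }
  }
  printf("s0 = %d\n",s0);                 /* 3*4 + 2*4 + 3*3 = 29 */
  return 0;
}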

118: PetscErrorCode PCTelescopeSetUp_dmda_repart_coors2d(PetscSubcomm psubcomm,DM dm,DM subdm)
119: {
121:   DM             cdm;
122:   Vec            coor,coor_natural,perm_coors;
123:   PetscInt       i,j,si,sj,ni,nj,M,N,Ml,Nl,c,nidx;
124:   PetscInt       *fine_indices;
125:   IS             is_fine,is_local;
126:   VecScatter     sctx;

129:   DMGetCoordinates(dm,&coor);
130:   if (!coor) return(0);
131:   if (isActiveRank(psubcomm)) {
132:     DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
133:   }
134:   /* Get the coordinate vector from the distributed array */
135:   DMGetCoordinateDM(dm,&cdm);
136:   DMDACreateNaturalVector(cdm,&coor_natural);

138:   DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
139:   DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

141:   /* get the natural-ordering indices of the coordinate entries this rank needs */
142:   DMDAGetInfo(dm,NULL,&M,&N,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
143:   if (isActiveRank(psubcomm)) {
144:     DMDAGetCorners(subdm,&si,&sj,NULL,&ni,&nj,NULL);
145:     Ml = ni;
146:     Nl = nj;
147:   } else {
148:     si = sj = 0;
149:     ni = nj = 0;
150:     Ml = Nl = 0;
151:   }

153:   PetscMalloc1(Ml*Nl*2,&fine_indices);
154:   c = 0;
155:   if (isActiveRank(psubcomm)) {
156:     for (j=sj; j<sj+nj; j++) {
157:       for (i=si; i<si+ni; i++) {
158:         nidx = (i) + (j)*M;
159:         fine_indices[c  ] = 2 * nidx     ;
160:         fine_indices[c+1] = 2 * nidx + 1 ;
161:         c = c + 2;
162:       }
163:     }
164:     if (c != Ml*Nl*2) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c %D should equal 2 * Ml %D * Nl %D",c,Ml,Nl);
165:   }

167:   /* generate scatter */
168:   ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*2,fine_indices,PETSC_USE_POINTER,&is_fine);
169:   ISCreateStride(PETSC_COMM_SELF,Ml*Nl*2,0,1,&is_local);

171:   /* scatter */
172:   VecCreate(PETSC_COMM_SELF,&perm_coors);
173:   VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*2);
174:   VecSetType(perm_coors,VECSEQ);

176:   VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
177:   VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
178:   VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
179:   /* access */
180:   if (isActiveRank(psubcomm)) {
181:     Vec               _coors;
182:     const PetscScalar *LA_perm;
183:     PetscScalar       *LA_coors;

185:     DMGetCoordinates(subdm,&_coors);
186:     VecGetArrayRead(perm_coors,&LA_perm);
187:     VecGetArray(_coors,&LA_coors);
188:     for (i=0; i<Ml*Nl*2; i++) {
189:       LA_coors[i] = LA_perm[i];
190:     }
191:     VecRestoreArray(_coors,&LA_coors);
192:     VecRestoreArrayRead(perm_coors,&LA_perm);
193:   }

195:   /* update local coords */
196:   if (isActiveRank(psubcomm)) {
197:     DM  _dmc;
198:     Vec _coors,_coors_local;
199:     DMGetCoordinateDM(subdm,&_dmc);
200:     DMGetCoordinates(subdm,&_coors);
201:     DMGetCoordinatesLocal(subdm,&_coors_local);
202:     DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
203:     DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
204:   }
205:   VecScatterDestroy(&sctx);
206:   ISDestroy(&is_fine);
207:   PetscFree(fine_indices);
208:   ISDestroy(&is_local);
209:   VecDestroy(&perm_coors);
210:   VecDestroy(&coor_natural);
211:   return(0);
212: }

216: PetscErrorCode PCTelescopeSetUp_dmda_repart_coors3d(PetscSubcomm psubcomm,DM dm,DM subdm)
217: {
219:   DM             cdm;
220:   Vec            coor,coor_natural,perm_coors;
221:   PetscInt       i,j,k,si,sj,sk,ni,nj,nk,M,N,P,Ml,Nl,Pl,c,nidx;
222:   PetscInt       *fine_indices;
223:   IS             is_fine,is_local;
224:   VecScatter     sctx;

227:   DMGetCoordinates(dm,&coor);
228:   if (!coor) return(0);

230:   if (isActiveRank(psubcomm)) {
231:     DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
232:   }

234:   /* Get the coordinate vector from the distributed array */
235:   DMGetCoordinateDM(dm,&cdm);
236:   DMDACreateNaturalVector(cdm,&coor_natural);
237:   DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
238:   DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

240:   /* get the natural-ordering indices of the coordinate entries this rank needs */
241:   DMDAGetInfo(dm,NULL,&M,&N,&P,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

243:   if (isActiveRank(psubcomm)) {
244:     DMDAGetCorners(subdm,&si,&sj,&sk,&ni,&nj,&nk);
245:     Ml = ni;
246:     Nl = nj;
247:     Pl = nk;
248:   } else {
249:     si = sj = sk = 0;
250:     ni = nj = nk = 0;
251:     Ml = Nl = Pl = 0;
252:   }

254:   PetscMalloc1(Ml*Nl*Pl*3,&fine_indices);

256:   c = 0;
257:   if (isActiveRank(psubcomm)) {
258:     for (k=sk; k<sk+nk; k++) {
259:       for (j=sj; j<sj+nj; j++) {
260:         for (i=si; i<si+ni; i++) {
261:           nidx = (i) + (j)*M + (k)*M*N;
262:           fine_indices[c  ] = 3 * nidx     ;
263:           fine_indices[c+1] = 3 * nidx + 1 ;
264:           fine_indices[c+2] = 3 * nidx + 2 ;
265:           c = c + 3;
266:         }
267:       }
268:     }
269:   }

271:   /* generate scatter */
272:   ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*Pl*3,fine_indices,PETSC_USE_POINTER,&is_fine);
273:   ISCreateStride(PETSC_COMM_SELF,Ml*Nl*Pl*3,0,1,&is_local);

275:   /* scatter */
276:   VecCreate(PETSC_COMM_SELF,&perm_coors);
277:   VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*Pl*3);
278:   VecSetType(perm_coors,VECSEQ);
279:   VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
280:   VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
281:   VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);

283:   /* access */
284:   if (isActiveRank(psubcomm)) {
285:     Vec               _coors;
286:     const PetscScalar *LA_perm;
287:     PetscScalar       *LA_coors;

289:     DMGetCoordinates(subdm,&_coors);
290:     VecGetArrayRead(perm_coors,&LA_perm);
291:     VecGetArray(_coors,&LA_coors);
292:     for (i=0; i<Ml*Nl*Pl*3; i++) {
293:       LA_coors[i] = LA_perm[i];
294:     }
295:     VecRestoreArray(_coors,&LA_coors);
296:     VecRestoreArrayRead(perm_coors,&LA_perm);
297:   }

299:   /* update local coords */
300:   if (isActiveRank(psubcomm)) {
301:     DM  _dmc;
302:     Vec _coors,_coors_local;

304:     DMGetCoordinateDM(subdm,&_dmc);
305:     DMGetCoordinates(subdm,&_coors);
306:     DMGetCoordinatesLocal(subdm,&_coors_local);
307:     DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
308:     DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
309:   }

311:   VecScatterDestroy(&sctx);
312:   ISDestroy(&is_fine);
313:   PetscFree(fine_indices);
314:   ISDestroy(&is_local);
315:   VecDestroy(&perm_coors);
316:   VecDestroy(&coor_natural);
317:   return(0);
318: }
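
Both coordinate-repartitioning routines above follow the same gather pattern: convert the parent coordinates to the natural ordering, build an index set of the wanted natural entries on each rank (empty on inactive ranks), and scatter those entries into a local sequential vector. A stripped-down sketch of that pattern, outside the PCTelescope context and with made-up sizes and indices:

#include <petscvec.h>

/* Hypothetical sketch (not part of PETSc): gather two chosen global entries of a
   parallel vector into a small sequential vector via a VecScatter, the same
   pattern used above to pull repartitioned coordinates onto the active ranks. */
int main(int argc,char **argv)
{
  Vec        x,y;
  IS         is_from,is_to;
  VecScatter sct;
  PetscInt   want[2] = {1,7};             /* global (natural) indices this rank wants */

  PetscInitialize(&argc,&argv,NULL,NULL);
  VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,8,&x);
  VecSet(x,2.0);
  VecCreateSeq(PETSC_COMM_SELF,2,&y);
  ISCreateGeneral(PETSC_COMM_WORLD,2,want,PETSC_COPY_VALUES,&is_from);
  ISCreateStride(PETSC_COMM_SELF,2,0,1,&is_to);
  VecScatterCreate(x,is_from,y,is_to,&sct);
  VecScatterBegin(sct,x,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(sct,x,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterDestroy(&sct);
  ISDestroy(&is_from);
  ISDestroy(&is_to);
  VecDestroy(&x);
  VecDestroy(&y);
  PetscFinalize();
  return 0;
}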

322: PetscErrorCode PCTelescopeSetUp_dmda_repart_coors(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
323: {
324:   PetscInt       dim;
325:   DM             dm,subdm;
326:   PetscSubcomm   psubcomm;
327:   MPI_Comm       comm;
328:   Vec            coor;

332:   PCGetDM(pc,&dm);
333:   DMGetCoordinates(dm,&coor);
334:   if (!coor) return(0);
335:   psubcomm = sred->psubcomm;
336:   comm = PetscSubcommParent(psubcomm);
337:   subdm = ctx->dmrepart;


340:   PetscInfo(pc,"PCTelescope: setting up the coordinates (DMDA)\n");
341:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
342:   switch (dim) {
343:   case 1: SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
344:     break;
345:   case 2: PCTelescopeSetUp_dmda_repart_coors2d(psubcomm,dm,subdm);
346:     break;
347:   case 3: PCTelescopeSetUp_dmda_repart_coors3d(psubcomm,dm,subdm);
348:     break;
349:   }
350:   return(0);
351: }

353: /* setup repartitioned dm */
356: PetscErrorCode PCTelescopeSetUp_dmda_repart(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
357: {
358:   PetscErrorCode  ierr;
359:   DM              dm;
360:   PetscInt        dim,nx,ny,nz,ndof,nsw,sum,k;
361:   DMBoundaryType  bx,by,bz;
362:   DMDAStencilType stencil;
363:   const PetscInt  *_range_i_re;
364:   const PetscInt  *_range_j_re;
365:   const PetscInt  *_range_k_re;
366:   DMDAInterpolationType itype;
367:   PetscInt              refine_x,refine_y,refine_z;
368:   MPI_Comm              comm,subcomm;
369:   const char            *prefix;

372:   comm = PetscSubcommParent(sred->psubcomm);
373:   subcomm = PetscSubcommChild(sred->psubcomm);
374:   PCGetDM(pc,&dm);

376:   DMDAGetInfo(dm,&dim,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,&nsw,&bx,&by,&bz,&stencil);
377:   DMDAGetInterpolationType(dm,&itype);
378:   DMDAGetRefinementFactor(dm,&refine_x,&refine_y,&refine_z);

380:   ctx->dmrepart = NULL;
381:   _range_i_re = _range_j_re = _range_k_re = NULL;
382:   /* Create DMDA on the child communicator */
383:   if (isActiveRank(sred->psubcomm)) {
384:     switch (dim) {
385:     case 1:
386:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (1D)\n");
387:       /*DMDACreate1d(subcomm,bx,nx,ndof,nsw,NULL,&ctx->dmrepart);*/
388:       ny = nz = 1;
389:       by = bz = DM_BOUNDARY_NONE;
390:       break;
391:     case 2:
392:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (2D)\n");
393:       /*DMDACreate2d(subcomm,bx,by,stencil,nx,ny, PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,&ctx->dmrepart);*/
394:       nz = 1;
395:       bz = DM_BOUNDARY_NONE;
396:       break;
397:     case 3:
398:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (3D)\n");
399:       /*DMDACreate3d(subcomm,bx,by,bz,stencil,nx,ny,nz, PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,NULL,&ctx->dmrepart);*/
400:       break;
401:     }
402:     /*
403:      The APIs DMDACreate1d(), DMDACreate2d() and DMDACreate3d() do not allow us to set/append
404:      a unique options prefix for the DM, thus I prefer to expose the contents of these APIs here.
405:      This allows users to control the partitioning of the subDM.
406:     */
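    /*
     For example (illustrative only, option names not verified against this release): if the
     sub-KSP carries the default "telescope_" prefix and the outer objects add no further
     prefix, the repartitioned DM reads options prefixed by "-telescope_repart_", so something
     like
       -telescope_repart_da_processors_x 2 -telescope_repart_da_processors_y 2
     could be used to choose the process grid of the subDM; consult DMSetFromOptions() for
     DMDA for the options that actually apply.
    */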
407:     DMDACreate(subcomm,&ctx->dmrepart);
408:     /* Set unique option prefix name */
409:     KSPGetOptionsPrefix(sred->ksp,&prefix);
410:     DMSetOptionsPrefix(ctx->dmrepart,prefix);
411:     DMAppendOptionsPrefix(ctx->dmrepart,"repart_");
412:     /* standard setup from DMDACreate{1,2,3}d() */
413:     DMSetDimension(ctx->dmrepart,dim);
414:     DMDASetSizes(ctx->dmrepart,nx,ny,nz);
415:     DMDASetNumProcs(ctx->dmrepart,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE);
416:     DMDASetBoundaryType(ctx->dmrepart,bx,by,bz);
417:     DMDASetDof(ctx->dmrepart,ndof);
418:     DMDASetStencilType(ctx->dmrepart,stencil);
419:     DMDASetStencilWidth(ctx->dmrepart,nsw);
420:     DMDASetOwnershipRanges(ctx->dmrepart,NULL,NULL,NULL);
421:     DMSetFromOptions(ctx->dmrepart);
422:     DMSetUp(ctx->dmrepart);
423:     /* Set refinement factors and interpolation type from the parent */
424:     DMDASetRefinementFactor(ctx->dmrepart,refine_x,refine_y,refine_z);
425:     DMDASetInterpolationType(ctx->dmrepart,itype);

427:     DMDAGetInfo(ctx->dmrepart,NULL,NULL,NULL,NULL,&ctx->Mp_re,&ctx->Np_re,&ctx->Pp_re,NULL,NULL,NULL,NULL,NULL,NULL);
428:     DMDAGetOwnershipRanges(ctx->dmrepart,&_range_i_re,&_range_j_re,&_range_k_re);

430:     ctx->dmrepart->ops->creatematrix = dm->ops->creatematrix;
431:     ctx->dmrepart->ops->createdomaindecomposition = dm->ops->createdomaindecomposition;
432:   }

434:   /* generate ranges for repartitioned dm */
435:   /* note: we assume rank 0 of the parent communicator always participates in the sub-communicator */
436:   MPI_Bcast(&ctx->Mp_re,1,MPIU_INT,0,comm);
437:   MPI_Bcast(&ctx->Np_re,1,MPIU_INT,0,comm);
438:   MPI_Bcast(&ctx->Pp_re,1,MPIU_INT,0,comm);

440:   PetscCalloc1(ctx->Mp_re,&ctx->range_i_re);
441:   PetscCalloc1(ctx->Np_re,&ctx->range_j_re);
442:   PetscCalloc1(ctx->Pp_re,&ctx->range_k_re);

444:   if (_range_i_re) {PetscMemcpy(ctx->range_i_re,_range_i_re,sizeof(PetscInt)*ctx->Mp_re);}
445:   if (_range_j_re) {PetscMemcpy(ctx->range_j_re,_range_j_re,sizeof(PetscInt)*ctx->Np_re);}
446:   if (_range_k_re) {PetscMemcpy(ctx->range_k_re,_range_k_re,sizeof(PetscInt)*ctx->Pp_re);}

448:   MPI_Bcast(ctx->range_i_re,ctx->Mp_re,MPIU_INT,0,comm);
449:   MPI_Bcast(ctx->range_j_re,ctx->Np_re,MPIU_INT,0,comm);
450:   MPI_Bcast(ctx->range_k_re,ctx->Pp_re,MPIU_INT,0,comm);

452:   PetscMalloc1(ctx->Mp_re,&ctx->start_i_re);
453:   PetscMalloc1(ctx->Np_re,&ctx->start_j_re);
454:   PetscMalloc1(ctx->Pp_re,&ctx->start_k_re);

456:   sum = 0;
457:   for (k=0; k<ctx->Mp_re; k++) {
458:     ctx->start_i_re[k] = sum;
459:     sum += ctx->range_i_re[k];
460:   }

462:   sum = 0;
463:   for (k=0; k<ctx->Np_re; k++) {
464:     ctx->start_j_re[k] = sum;
465:     sum += ctx->range_j_re[k];
466:   }

468:   sum = 0;
469:   for (k=0; k<ctx->Pp_re; k++) {
470:     ctx->start_k_re[k] = sum;
471:     sum += ctx->range_k_re[k];
472:   }

474:   /* attach repartitioned dm to child ksp */
475:   {
476:     PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
477:     void           *dmksp_ctx;

479:     DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);

481:     /* attach dm to ksp on sub communicator */
482:     if (isActiveRank(sred->psubcomm)) {
483:       KSPSetDM(sred->ksp,ctx->dmrepart);

485:       if (!dmksp_func || sred->ignore_kspcomputeoperators) {
486:         KSPSetDMActive(sred->ksp,PETSC_FALSE);
487:       } else {
488:         /* sub ksp inherits dmksp_func and context provided by user */
489:         KSPSetComputeOperators(sred->ksp,dmksp_func,dmksp_ctx);
490:         KSPSetDMActive(sred->ksp,PETSC_TRUE);
491:       }
492:     }
493:   }
494:   return(0);
495: }

499: PetscErrorCode PCTelescopeSetUp_dmda_permutation_3d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
500: {
502:   DM             dm;
503:   MPI_Comm       comm;
504:   Mat            Pscalar,P;
505:   PetscInt       ndof;
506:   PetscInt       i,j,k,location,startI[3],endI[3],lenI[3],nx,ny,nz;
507:   PetscInt       sr,er,Mr;
508:   Vec            V;

511:   PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-3D)\n");
512:   PetscObjectGetComm((PetscObject)pc,&comm);

514:   PCGetDM(pc,&dm);
515:   DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);

517:   DMGetGlobalVector(dm,&V);
518:   VecGetSize(V,&Mr);
519:   VecGetOwnershipRange(V,&sr,&er);
520:   DMRestoreGlobalVector(dm,&V);
521:   sr = sr / ndof;
522:   er = er / ndof;
523:   Mr = Mr / ndof;

525:   MatCreate(comm,&Pscalar);
526:   MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
527:   MatSetType(Pscalar,MATAIJ);
528:   MatSeqAIJSetPreallocation(Pscalar,1,NULL);
529:   MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

531:   DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],&lenI[2]);
532:   DMDAGetCorners(dm,&startI[0],&startI[1],&startI[2],&endI[0],&endI[1],&endI[2]);
533:   endI[0] += startI[0];
534:   endI[1] += startI[1];
535:   endI[2] += startI[2];

537:   for (k=startI[2]; k<endI[2]; k++) {
538:     for (j=startI[1]; j<endI[1]; j++) {
539:       for (i=startI[0]; i<endI[0]; i++) {
540:         PetscMPIInt rank_ijk_re,rank_reI[3];
541:         PetscInt    s0_re;
542:         PetscInt    ii,jj,kk,local_ijk_re,mapped_ijk;
543:         PetscInt    lenI_re[3];

545:         location = (i - startI[0]) + (j - startI[1])*lenI[0] + (k - startI[2])*lenI[0]*lenI[1];
546:         _DMDADetermineRankFromGlobalIJK(3,i,j,k,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
547:                                                ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
548:                                                ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
549:                                                &rank_reI[0],&rank_reI[1],&rank_reI[2],&rank_ijk_re);
550:         _DMDADetermineGlobalS0(3,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);
551:         ii = i - ctx->start_i_re[ rank_reI[0] ];
552:         if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error ii");
553:         jj = j - ctx->start_j_re[ rank_reI[1] ];
554:         if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error jj");
555:         kk = k - ctx->start_k_re[ rank_reI[2] ];
556:         if (kk < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error kk");
557:         lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
558:         lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
559:         lenI_re[2] = ctx->range_k_re[ rank_reI[2] ];
560:         local_ijk_re = ii + jj * lenI_re[0] + kk * lenI_re[0] * lenI_re[1];
561:         mapped_ijk = s0_re + local_ijk_re;
562:         MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
563:       }
564:     }
565:   }
566:   MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
567:   MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
568:   MatCreateMAIJ(Pscalar,ndof,&P);
569:   MatDestroy(&Pscalar);
570:   ctx->permutation = P;
571:   return(0);
572: }

576: PetscErrorCode PCTelescopeSetUp_dmda_permutation_2d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
577: {
579:   DM             dm;
580:   MPI_Comm       comm;
581:   Mat            Pscalar,P;
582:   PetscInt       ndof;
583:   PetscInt       i,j,location,startI[2],endI[2],lenI[2],nx,ny,nz;
584:   PetscInt       sr,er,Mr;
585:   Vec            V;

588:   PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-2D)\n");
589:   PetscObjectGetComm((PetscObject)pc,&comm);
590:   PCGetDM(pc,&dm);
591:   DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
592:   DMGetGlobalVector(dm,&V);
593:   VecGetSize(V,&Mr);
594:   VecGetOwnershipRange(V,&sr,&er);
595:   DMRestoreGlobalVector(dm,&V);
596:   sr = sr / ndof;
597:   er = er / ndof;
598:   Mr = Mr / ndof;

600:   MatCreate(comm,&Pscalar);
601:   MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
602:   MatSetType(Pscalar,MATAIJ);
603:   MatSeqAIJSetPreallocation(Pscalar,1,NULL);
604:   MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

606:   DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
607:   DMDAGetCorners(dm,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
608:   endI[0] += startI[0];
609:   endI[1] += startI[1];

611:   for (j=startI[1]; j<endI[1]; j++) {
612:     for (i=startI[0]; i<endI[0]; i++) {
613:       PetscMPIInt rank_ijk_re,rank_reI[3];
614:       PetscInt    s0_re;
615:       PetscInt    ii,jj,local_ijk_re,mapped_ijk;
616:       PetscInt    lenI_re[3];

618:       location = (i - startI[0]) + (j - startI[1])*lenI[0];
619:       _DMDADetermineRankFromGlobalIJK(2,i,j,0,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
620:                                              ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
621:                                              ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
622:                                              &rank_reI[0],&rank_reI[1],NULL,&rank_ijk_re);

624:       _DMDADetermineGlobalS0(2,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);

626:       ii = i - ctx->start_i_re[ rank_reI[0] ];
627:       if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error ii");
628:       jj = j - ctx->start_j_re[ rank_reI[1] ];
629:       if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error jj");

631:       lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
632:       lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
633:       local_ijk_re = ii + jj * lenI_re[0];
634:       mapped_ijk = s0_re + local_ijk_re;
635:       MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
636:     }
637:   }
638:   MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
639:   MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
640:   MatCreateMAIJ(Pscalar,ndof,&P);
641:   MatDestroy(&Pscalar);
642:   ctx->permutation = P;
643:   return(0);
644: }

648: PetscErrorCode PCTelescopeSetUp_dmda_scatters(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
649: {
651:   Vec            xred,yred,xtmp,x,xp;
652:   VecScatter     scatter;
653:   IS             isin;
654:   Mat            B;
655:   PetscInt       m,bs,st,ed;
656:   MPI_Comm       comm;

659:   PetscObjectGetComm((PetscObject)pc,&comm);
660:   PCGetOperators(pc,NULL,&B);
661:   MatCreateVecs(B,&x,NULL);
662:   MatGetBlockSize(B,&bs);
663:   VecDuplicate(x,&xp);
664:   m = 0;
665:   xred = NULL;
666:   yred = NULL;
667:   if (isActiveRank(sred->psubcomm)) {
668:     DMCreateGlobalVector(ctx->dmrepart,&xred);
669:     VecDuplicate(xred,&yred);
670:     VecGetOwnershipRange(xred,&st,&ed);
671:     ISCreateStride(comm,ed-st,st,1,&isin);
672:     VecGetLocalSize(xred,&m);
673:   } else {
674:     VecGetOwnershipRange(x,&st,&ed);
675:     ISCreateStride(comm,0,st,1,&isin);
676:   }
677:   ISSetBlockSize(isin,bs);
678:   VecCreate(comm,&xtmp);
679:   VecSetSizes(xtmp,m,PETSC_DECIDE);
680:   VecSetBlockSize(xtmp,bs);
681:   VecSetType(xtmp,((PetscObject)x)->type_name);
682:   VecScatterCreate(x,isin,xtmp,NULL,&scatter);
683:   sred->xred    = xred;
684:   sred->yred    = yred;
685:   sred->isin    = isin;
686:   sred->scatter = scatter;
687:   sred->xtmp    = xtmp;

689:   ctx->xp       = xp;
690:   VecDestroy(&x);
691:   return(0);
692: }

696: PetscErrorCode PCTelescopeSetUp_dmda(PC pc,PC_Telescope sred)
697: {
698:   PC_Telescope_DMDACtx *ctx;
699:   PetscInt                 dim;
700:   DM                       dm;
701:   MPI_Comm                 comm;
702:   PetscErrorCode           ierr;

705:   PetscInfo(pc,"PCTelescope: setup (DMDA)\n");
706:   PetscMalloc1(1,&ctx);
707:   PetscMemzero(ctx,sizeof(PC_Telescope_DMDACtx));
708:   sred->dm_ctx = (void*)ctx;

710:   PetscObjectGetComm((PetscObject)pc,&comm);
711:   PCGetDM(pc,&dm);
712:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

714:   PCTelescopeSetUp_dmda_repart(pc,sred,ctx);
715:   PCTelescopeSetUp_dmda_repart_coors(pc,sred,ctx);
716:   switch (dim) {
717:   case 1: SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
718:     break;
719:   case 2: PCTelescopeSetUp_dmda_permutation_2d(pc,sred,ctx);
720:     break;
721:   case 3: PCTelescopeSetUp_dmda_permutation_3d(pc,sred,ctx);
722:     break;
723:   }
724:   PCTelescopeSetUp_dmda_scatters(pc,sred,ctx);
725:   return(0);
726: }

730: PetscErrorCode PCTelescopeMatCreate_dmda_dmactivefalse(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
731: {
733:   PC_Telescope_DMDACtx *ctx;
734:   MPI_Comm       comm,subcomm;
735:   Mat            Bperm,Bred,B,P;
736:   PetscInt       nr,nc;
737:   IS             isrow,iscol;
738:   Mat            Blocal,*_Blocal;

741:   PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (DMDA)\n");
742:   PetscObjectGetComm((PetscObject)pc,&comm);
743:   subcomm = PetscSubcommChild(sred->psubcomm);
744:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;

746:   PCGetOperators(pc,NULL,&B);
747:   MatGetSize(B,&nr,&nc);

749:   P = ctx->permutation;
750:   MatPtAP(B,P,MAT_INITIAL_MATRIX,1.1,&Bperm);

752:   /* Get submatrices */
753:   isrow = sred->isin;
754:   ISCreateStride(comm,nc,0,1,&iscol);

756:   MatGetSubMatrices(Bperm,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
757:   Blocal = *_Blocal;
758:   Bred = NULL;
759:   if (isActiveRank(sred->psubcomm)) {
760:     PetscInt mm;

762:     if (reuse != MAT_INITIAL_MATRIX) {Bred = *A;}
763:     MatGetSize(Blocal,&mm,NULL);
764:     /* MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,PETSC_DECIDE,reuse,&Bred); */
765:     MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
766:   }
767:   *A = Bred;

769:   ISDestroy(&iscol);
770:   MatDestroy(&Bperm);
771:   MatDestroyMatrices(1,&_Blocal);
772:   return(0);
773: }

777: PetscErrorCode PCTelescopeMatCreate_dmda(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
778: {
780:   DM             dm;
781:   PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
782:   void           *dmksp_ctx;

785:   PCGetDM(pc,&dm);
786:   DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);
787:   /* We assume that dmksp_func == NULL is equivalent to dmActive = PETSC_FALSE */
788:   if (dmksp_func && !sred->ignore_kspcomputeoperators) {
789:     DM  dmrepart;
790:     Mat Ak;

792:     *A = NULL;
793:     if (isActiveRank(sred->psubcomm)) {
794:       KSPGetDM(sred->ksp,&dmrepart);
795:       if (reuse == MAT_INITIAL_MATRIX) {
796:         DMCreateMatrix(dmrepart,&Ak);
797:         *A = Ak;
798:       } else if (reuse == MAT_REUSE_MATRIX) {
799:         Ak = *A;
800:       }
801:       /*
802:        There is no need to explicitly assemble the operator now; the sub-KSP will call the method
803:        provided to KSPSetComputeOperators() during KSPSetUp(). (A sketch of such a callback follows this function.)
804:       */
805:     }
806:   } else {
807:     PCTelescopeMatCreate_dmda_dmactivefalse(pc,sred,reuse,A);
808:   }
809:   return(0);
810: }
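
When the DM is active, the operator is assembled later by the user's compute-operators callback, which the sub-KSP inherits in PCTelescopeSetUp_dmda_repart() above. A minimal sketch of such a callback; MyComputeOperators and the identity assembly are hypothetical, only the PetscErrorCode (*)(KSP,Mat,Mat,void*) signature is taken from the code above:

#include <petscksp.h>

/* Hypothetical user callback (illustrative name): assembles on whatever layout the
   KSP's operator has, so the same routine serves both the parent KSP and the
   telescoped sub-KSP, which holds the repartitioned DMDA. */
static PetscErrorCode MyComputeOperators(KSP ksp,Mat J,Mat B,void *ctx)
{
  PetscErrorCode ierr;
  PetscInt       i,start,end;

  /* For illustration only: assemble an identity operator row by row */
  ierr = MatGetOwnershipRange(B,&start,&end);CHKERRQ(ierr);
  for (i=start; i<end; i++) {
    ierr = MatSetValue(B,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  return 0;
}

/* Registered once on the outer KSP, e.g.
     ierr = KSPSetComputeOperators(ksp,MyComputeOperators,NULL);CHKERRQ(ierr);
   PCTelescope then forwards the callback to the sub-KSP as shown above. */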

814: PetscErrorCode PCTelescopeMatNullSpaceCreate_dmda(PC pc,PC_Telescope sred,Mat sub_mat)
815: {
816:   PetscErrorCode   ierr;
817:   MatNullSpace     nullspace,sub_nullspace;
818:   Mat              A,B;
819:   PetscBool        has_const;
820:   PetscInt         i,k,n = 0;
821:   const Vec        *vecs;
822:   Vec              *sub_vecs = NULL;
823:   MPI_Comm         subcomm;
824:   PC_Telescope_DMDACtx *ctx;

827:   PCGetOperators(pc,&A,&B);
828:   MatGetNullSpace(B,&nullspace);
829:   if (!nullspace) return(0);

831:   PetscInfo(pc,"PCTelescope: generating nullspace (DMDA)\n");
832:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
833:   subcomm = PetscSubcommChild(sred->psubcomm);
834:   MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

836:   if (isActiveRank(sred->psubcomm)) {
837:     /* create new vectors */
838:     if (n) {
839:       VecDuplicateVecs(sred->xred,n,&sub_vecs);
840:     }
841:   }

843:   /* copy entries */
844:   for (k=0; k<n; k++) {
845:     const PetscScalar *x_array;
846:     PetscScalar       *LA_sub_vec;
847:     PetscInt          st,ed;

849:     /* permute vector into ordering associated with re-partitioned dmda */
850:     MatMultTranspose(ctx->permutation,vecs[k],ctx->xp);

852:     /* pull in vector x->xtmp */
853:     VecScatterBegin(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
854:     VecScatterEnd(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);

856:     /* copy vector entries into xred */
857:     VecGetArrayRead(sred->xtmp,&x_array);
858:     if (sub_vecs) {
859:       if (sub_vecs[k]) {
860:         VecGetOwnershipRange(sub_vecs[k],&st,&ed);
861:         VecGetArray(sub_vecs[k],&LA_sub_vec);
862:         for (i=0; i<ed-st; i++) {
863:           LA_sub_vec[i] = x_array[i];
864:         }
865:         VecRestoreArray(sub_vecs[k],&LA_sub_vec);
866:       }
867:     }
868:     VecRestoreArrayRead(sred->xtmp,&x_array);
869:   }

871:   if (isActiveRank(sred->psubcomm)) {
872:     /* create new nullspace for redundant object */
873:     MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,&sub_nullspace);
874:     sub_nullspace->remove = nullspace->remove;
875:     sub_nullspace->rmctx = nullspace->rmctx;

877:     /* attach redundant nullspace to Bred */
878:     MatSetNullSpace(sub_mat,sub_nullspace);
879:     VecDestroyVecs(n,&sub_vecs);
880:   }
881:   return(0);
882: }

886: PetscErrorCode PCApply_Telescope_dmda(PC pc,Vec x,Vec y)
887: {
888:   PC_Telescope      sred = (PC_Telescope)pc->data;
889:   PetscErrorCode    ierr;
890:   Mat               perm;
891:   Vec               xtmp,xp,xred,yred;
892:   PetscInt          i,st,ed;
893:   VecScatter        scatter;
894:   PetscScalar       *array;
895:   const PetscScalar *x_array;
896:   PC_Telescope_DMDACtx *ctx;

898:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
899:   xtmp    = sred->xtmp;
900:   scatter = sred->scatter;
901:   xred    = sred->xred;
902:   yred    = sred->yred;
903:   perm  = ctx->permutation;
904:   xp    = ctx->xp;


908:   /* permute vector into ordering associated with re-partitioned dmda */
909:   MatMultTranspose(perm,x,xp);

911:   /* pull in vector x->xtmp */
912:   VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
913:   VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

915:   /* copy vector entries into xred */
916:   VecGetArrayRead(xtmp,&x_array);
917:   if (xred) {
918:     PetscScalar *LA_xred;
919:     VecGetOwnershipRange(xred,&st,&ed);

921:     VecGetArray(xred,&LA_xred);
922:     for (i=0; i<ed-st; i++) {
923:       LA_xred[i] = x_array[i];
924:     }
925:     VecRestoreArray(xred,&LA_xred);
926:   }
927:   VecRestoreArrayRead(xtmp,&x_array);

929:   /* solve */
930:   if (isActiveRank(sred->psubcomm)) {
931:     KSPSolve(sred->ksp,xred,yred);
932:   }

934:   /* return vector */
935:   VecGetArray(xtmp,&array);
936:   if (yred) {
937:     const PetscScalar *LA_yred;
938:     VecGetOwnershipRange(yred,&st,&ed);
939:     VecGetArrayRead(yred,&LA_yred);
940:     for (i=0; i<ed-st; i++) {
941:       array[i] = LA_yred[i];
942:     }
943:     VecRestoreArrayRead(yred,&LA_yred);
944:   }
945:   VecRestoreArray(xtmp,&array);
946:   VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
947:   VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
948:   MatMult(perm,xp,y);
949:   return(0);
950: }

954: PetscErrorCode PCApplyRichardson_Telescope_dmda(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
955: {
956:   PC_Telescope      sred = (PC_Telescope)pc->data;
957:   PetscErrorCode    ierr;
958:   Mat               perm;
959:   Vec               xtmp,xp,yred;
960:   PetscInt          i,st,ed;
961:   VecScatter        scatter;
962:   const PetscScalar *x_array;
963:   PetscBool         default_init_guess_value = PETSC_FALSE;
964:   PC_Telescope_DMDACtx *ctx;

966:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
967:   xtmp    = sred->xtmp;
968:   scatter = sred->scatter;
969:   yred    = sred->yred;
970:   perm  = ctx->permutation;
971:   xp    = ctx->xp;

973:   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope_dmda only supports max_it = 1");
974:   *reason = (PCRichardsonConvergedReason)0;

976:   if (!zeroguess) {
977:     PetscInfo(pc,"PCTelescopeDMDA: Scattering y for non-zero initial guess\n");
978:     /* permute vector into ordering associated with re-partitioned dmda */
979:     MatMultTranspose(perm,y,xp);

981:     /* pull in vector x->xtmp */
982:     VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
983:     VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

985:     /* copy vector entries into xred */
986:     VecGetArrayRead(xtmp,&x_array);
987:     if (yred) {
988:       PetscScalar *LA_yred;
989:       VecGetOwnershipRange(yred,&st,&ed);
990:       VecGetArray(yred,&LA_yred);
991:       for (i=0; i<ed-st; i++) {
992:         LA_yred[i] = x_array[i];
993:       }
994:       VecRestoreArray(yred,&LA_yred);
995:     }
996:     VecRestoreArrayRead(xtmp,&x_array);
997:   }

999:   if (isActiveRank(sred->psubcomm)) {
1000:     KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
1001:     if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
1002:   }

1004:   PCApply_Telescope_dmda(pc,x,y);

1006:   if (isActiveRank(sred->psubcomm)) {
1007:     KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
1008:   }

1010:   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
1011:   *outits = 1;
1012:   return(0);
1013: }

1017: PetscErrorCode PCReset_Telescope_dmda(PC pc)
1018: {
1019:   PetscErrorCode       ierr;
1020:   PC_Telescope         sred = (PC_Telescope)pc->data;
1021:   PC_Telescope_DMDACtx *ctx;

1024:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
1025:   VecDestroy(&ctx->xp);
1026:   MatDestroy(&ctx->permutation);
1027:   DMDestroy(&ctx->dmrepart);
1028:   PetscFree(ctx->range_i_re);
1029:   PetscFree(ctx->range_j_re);
1030:   PetscFree(ctx->range_k_re);
1031:   PetscFree(ctx->start_i_re);
1032:   PetscFree(ctx->start_j_re);
1033:   PetscFree(ctx->start_k_re);
1034:   return(0);
1035: }

1039: PetscErrorCode DMView_DMDAShort_3d(DM dm,PetscViewer v)
1040: {
1041:   PetscInt       M,N,P,m,n,p,ndof,nsw;
1042:   MPI_Comm       comm;
1043:   PetscMPIInt    size;
1044:   const char*    prefix;

1048:   PetscObjectGetComm((PetscObject)dm,&comm);
1049:   MPI_Comm_size(comm,&size);
1050:   DMGetOptionsPrefix(dm,&prefix);
1051:   DMDAGetInfo(dm,NULL,&M,&N,&P,&m,&n,&p,&ndof,&nsw,NULL,NULL,NULL,NULL);
1052:   if (prefix) {PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);}
1053:   else {PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);}
1054:   PetscViewerASCIIPrintf(v,"  M %D N %D P %D m %D n %D p %D dof %D overlap %D\n",M,N,P,m,n,p,ndof,nsw);
1055:   return(0);
1056: }

1060: PetscErrorCode DMView_DMDAShort_2d(DM dm,PetscViewer v)
1061: {
1062:   PetscInt       M,N,m,n,ndof,nsw;
1063:   MPI_Comm       comm;
1064:   PetscMPIInt    size;
1065:   const char*    prefix;

1069:   PetscObjectGetComm((PetscObject)dm,&comm);
1070:   MPI_Comm_size(comm,&size);
1071:   DMGetOptionsPrefix(dm,&prefix);
1072:   DMDAGetInfo(dm,NULL,&M,&N,NULL,&m,&n,NULL,&ndof,&nsw,NULL,NULL,NULL,NULL);
1073:   if (prefix) {PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);}
1074:   else {PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);}
1075:   PetscViewerASCIIPrintf(v,"  M %D N %D m %D n %D dof %D overlap %D\n",M,N,m,n,ndof,nsw);
1076:   return(0);
1077: }

1081: PetscErrorCode DMView_DMDAShort(DM dm,PetscViewer v)
1082: {
1084:   PetscInt       dim;

1087:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
1088:   switch (dim) {
1089:   case 2: DMView_DMDAShort_2d(dm,v);
1090:     break;
1091:   case 3: DMView_DMDAShort_3d(dm,v);
1092:     break;
1093:   }
1094:   return(0);
1095: }