Actual source code: redundant.c

petsc-3.5.4 2015-05-23
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc-private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators */
} PC_Redundant;

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!red->psubcomm) {
      PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");
    } else {
      PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);
      PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscViewerASCIIPushTab(viewer);
        KSPView(red->ksp,subviewer);
        PetscViewerASCIIPopTab(viewer);
      }
      PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);
    }
  } else if (isstring) {
    PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
  }
  return(0);
}

static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;
  const char     *prefix;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* if the pmat set by the user is sequential then we do not need to gather the parallel matrix */
  MPI_Comm_size(comm,&size);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) {
      PetscSubcommCreate(comm,&red->psubcomm);
      PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);
      PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
      /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
      PetscSubcommSetFromOptions(red->psubcomm);
      PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));

      /* create a new PC of which the processes in each subcomm have a copy */
      subcomm = red->psubcomm->comm;

      KSPCreate(subcomm,&red->ksp);
      PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
      PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);
      KSPSetType(red->ksp,KSPPREONLY);
      KSPGetPC(red->ksp,&red->pc);
      PCSetType(red->pc,PCLU);

      PCGetOptionsPrefix(pc,&prefix);
      KSPSetOptionsPrefix(red->ksp,prefix);
      KSPAppendOptionsPrefix(red->ksp,"redundant_");
    } else {
      subcomm = red->psubcomm->comm;
    }

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);
      KSPSetOperators(red->ksp,red->pmats,red->pmats);

      /* get working vectors xsub and ysub */
      MatGetVecs(red->pmats,&red->xsub,&red->ysub);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      MatGetLocalSize(red->pmats,&mloc_sub,NULL);
      VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);
      VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        MatGetVecs(pc->pmat,&x,0);
        VecGetSize(x,&M);
        VecGetOwnershipRange(x,&mstart,&mend);
        mlocal = mend - mstart;
        PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);
        VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);
        ISDestroy(&is1);
        ISDestroy(&is2);

        /* The impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);
        ISCreateStride(comm,mlocal,mstart,1,&is2);
        VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);
        ISDestroy(&is1);
        ISDestroy(&is2);
        PetscFree2(idx1,idx2);
        VecDestroy(&x);
      }
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        MatDestroy(&red->pmats);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,reuse,&red->pmats);
      KSPSetOperators(red->ksp,red->pmats,red->pmats);
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  }

  if (pc->setfromoptionscalled) {
    KSPSetFromOptions(red->ksp);
  }
  KSPSetUp(red->ksp);
  return(0);
}
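
/* A minimal standalone sketch (not part of the PETSc source) of the scatter-in
   index map built in PCSetUp_Redundant() above: each global entry i of x is
   replicated to position i + M*k in xdup for every subcommunicator k, so xdup
   holds n contiguous copies of x.  The sizes below are made up for illustration. */
#include <stdio.h>

int main(void)
{
  const int M = 4, n = 2;          /* hypothetical global size and number of subcomms */
  const int mstart = 0, mend = M;  /* pretend this process owns all of x */
  int       idx1[8],idx2[8],i,j = 0,k;

  for (k=0; k<n; k++) {
    for (i=mstart; i<mend; i++) {
      idx1[j]   = i;        /* source index in x */
      idx2[j++] = i + M*k;  /* destination in xdup: copy k is offset by M*k */
    }
  }
  for (i=0; i<j; i++) printf("x[%d] -> xdup[%d]\n",idx1[i],idx2[i]);
  return 0;
}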

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscScalar    *array;

  if (!red->useparallelmat) {
    KSPSolve(red->ksp,x,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolve(red->ksp,red->xsub,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}
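
/* A minimal sketch (illustrative only, not part of this file) of the zero-copy
   aliasing used above: the subcommunicator vector temporarily adopts the local
   storage of the vector on the larger (dup) communicator, so no values are
   copied.  It assumes the two vectors have identical local sizes, as this file
   guarantees for xsub/xdup and ysub/ydup. */
static PetscErrorCode DemoAlias(Vec xdup,Vec xsub)
{
  PetscScalar *array;

  VecGetArray(xdup,&array);    /* borrow xdup's local storage            */
  VecPlaceArray(xsub,array);   /* xsub now aliases that storage          */
  /* ... operate on xsub here, e.g. a subcommunicator KSPSolve() ...     */
  VecResetArray(xsub);         /* detach before returning the array      */
  VecRestoreArray(xdup,&array);
  return(0);
}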

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscScalar    *array;

  if (!red->useparallelmat) {
    KSPSolveTranspose(red->ksp,x,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolveTranspose(red->ksp,red->xsub,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  if (red->useparallelmat) {
    VecScatterDestroy(&red->scatterin);
    VecScatterDestroy(&red->scatterout);
    VecDestroy(&red->ysub);
    VecDestroy(&red->xsub);
    VecDestroy(&red->xdup);
    VecDestroy(&red->ydup);
  }
  MatDestroy(&red->pmats);
  KSPReset(red->ksp);
  return(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PCReset_Redundant(pc);
  KSPDestroy(&red->ksp);
  PetscSubcommDestroy(&red->psubcomm);
  PetscFree(pc->data);
  return(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscOptionsHead("Redundant options");
  PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);
  PetscOptionsTail();
  return(0);
}

static PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  red->nsubcomm = nreds;
  return(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"num of redundant pc %D must be positive",nredundant);
  PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));
  return(0);
}
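
/* Usage sketch (hypothetical driver code, error checking omitted): with 64 MPI
   processes and nredundant = 4, the PC performs 4 identical parallel solves,
   each on a 16-process subcommunicator. */
static PetscErrorCode SetupRedundant(KSP ksp)
{
  PC pc;

  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCREDUNDANT);
  PCRedundantSetNumber(pc,4);   /* 4 redundant subcommunicators */
  return(0);
}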

static PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscObjectReference((PetscObject)in);
  VecScatterDestroy(&red->scatterin);

  red->scatterin  = in;

  PetscObjectReference((PetscObject)out);
  VecScatterDestroy(&red->scatterout);
  red->scatterout = out;
  return(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));
  return(0);
}
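
/* Usage sketch (hypothetical, error checking omitted): hand custom scatters to
   the PC.  In a real use the 'in' scatter must map the global vector into the
   concatenated redundant layout and 'out' back again, as PCSetUp_Redundant()
   builds above; the stride scatters here only illustrate the call sequence
   and the reference handling. */
static PetscErrorCode InstallScatters(PC pc,Vec x,Vec xdup)
{
  IS         is;
  PetscInt   M;
  VecScatter in,out;

  VecGetSize(x,&M);
  ISCreateStride(PetscObjectComm((PetscObject)pc),M,0,1,&is);
  VecScatterCreate(x,is,xdup,is,&in);
  VecScatterCreate(xdup,is,x,is,&out);
  PCRedundantSetScatter(pc,in,out);   /* the PC takes its own references */
  VecScatterDestroy(&in);
  VecScatterDestroy(&out);
  ISDestroy(&is);
  return(0);
}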

static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  if (!red->psubcomm) {
    PetscObjectGetComm((PetscObject)pc,&comm);
    PetscSubcommCreate(comm,&red->psubcomm);
    PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);
    PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);
    PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));

    /* create a new PC of which the processes in each subcomm have a copy */
    subcomm = red->psubcomm->comm;

    KSPCreate(subcomm,&red->ksp);
    PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
    PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);
    KSPSetType(red->ksp,KSPPREONLY);
    KSPGetPC(red->ksp,&red->pc);
    PCSetType(red->pc,PCLU);

    PCGetOptionsPrefix(pc,&prefix);
    KSPSetOptionsPrefix(red->ksp,prefix);
    KSPAppendOptionsPrefix(red->ksp,"redundant_");
  }
  *innerksp = red->ksp;
  return(0);
}

/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC on the smaller set of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));
  return(0);
}
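
/* Usage sketch (hypothetical): fetch the inner KSP and customize the
   subcommunicator solve programmatically instead of via -redundant_ options. */
static PetscErrorCode CustomizeInnerKSP(PC pc)
{
  KSP inner;

  PCRedundantGetKSP(pc,&inner);
  KSPSetType(inner,KSPGMRES);   /* replace the default KSPPREONLY */
  KSPSetTolerances(inner,1.e-8,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
  return(0);
}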

static PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  return(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used on each subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));
  return(0);
}
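
/* Usage sketch (hypothetical): inspect the redundant operator owned by this
   process's subcommunicator after PCSetUp() has run. */
static PetscErrorCode InspectRedundantMat(PC pc)
{
  Mat      A;
  PetscInt m,n;

  PCRedundantGetOperators(pc,&A,NULL);
  MatGetSize(A,&m,&n);
  PetscPrintf(PETSC_COMM_SELF,"redundant operator is %D x %D\n",m,n);
  return(0);
}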

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioner can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
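
/* Example command line (options grounded in this file; the executable name is
   hypothetical): run 4 redundant groups on 64 processes, each group solving
   its own copy of the whole problem with LU:

     mpiexec -n 64 ./app -pc_type redundant -pc_redundant_number 4 \
                         -redundant_ksp_type preonly -redundant_pc_type lu
*/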

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscNewLog(pc,&red);
  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);
  return(0);
}