Actual source code: redundant.c

/*
  This file defines a "solve the problem redundantly on each processor" preconditioner.
*/
#include "src/ksp/pc/pcimpl.h"
#include "petscksp.h"

typedef struct {
  PC         pc;                    /* actual preconditioner used on each processor */
  Vec        x,b;                   /* sequential vectors to hold parallel vectors */
  Mat        *pmats;                /* matrix and optional preconditioner matrix */
  VecScatter scatterin,scatterout;  /* scatter used to move all values to each processor */
  PetscTruth useparallelmat;
} PC_Redundant;
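
/*
   Overview: PCSetUp_Redundant() gathers the entire parallel matrix onto every
   process with MatGetSubMatrices(); PCApply_Redundant() then scatters the full
   right-hand side into red->b on each process, applies the sequential inner
   preconditioner red->pc (LU by default) locally, and scatters each process's
   piece of red->x back into the parallel result vector.
*/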

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscMPIInt    rank;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;

  MPI_Comm_rank(pc->comm,&rank);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer,"  Redundant solver preconditioner: Actual PC follows\n");
    PetscViewerGetSingleton(viewer,&sviewer);
    if (!rank) {
      PetscViewerASCIIPushTab(viewer);
      PCView(red->pc,sviewer);
      PetscViewerASCIIPopTab(viewer);
    }
    PetscViewerRestoreSingleton(viewer,&sviewer);
  } else if (isstring) {
    PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
    PetscViewerGetSingleton(viewer,&sviewer);
    if (!rank) {
      PCView(red->pc,sviewer);
    }
    PetscViewerRestoreSingleton(viewer,&sviewer);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  return(0);
}

static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red  = (PC_Redundant*)pc->data;
  PetscInt       mstart,mlocal,m;
  PetscMPIInt    size;
  IS             isl;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm;
  Vec            vec;
  MatGetVecs(pc->pmat,&vec,0);
  PCSetFromOptions(red->pc);
  VecGetSize(vec,&m);
  if (!pc->setupcalled) {
    VecGetLocalSize(vec,&mlocal);
    VecCreateSeq(PETSC_COMM_SELF,m,&red->x);
    VecDuplicate(red->x,&red->b);
    if (!red->scatterin) {

      /*
         Create the vectors and vector scatter to get the entire vector onto each processor
      */
      VecGetOwnershipRange(vec,&mstart,PETSC_NULL);
      VecScatterCreate(vec,0,red->x,0,&red->scatterin);
      ISCreateStride(pc->comm,mlocal,mstart,1,&isl);
      VecScatterCreate(red->x,isl,vec,isl,&red->scatterout);
      ISDestroy(isl);
    }
  }
  VecDestroy(vec);

  /* if the preconditioner matrix supplied by the user is sequential, there is no parallel matrix to gather */
  PetscObjectGetComm((PetscObject)pc->pmat,&comm);
  MPI_Comm_size(comm,&size);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        MatDestroyMatrices(1,&red->pmats);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /*
       grab the parallel matrix and put it on each processor
    */
    ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
    MatGetSubMatrices(pc->pmat,1,&isl,&isl,reuse,&red->pmats);
    ISDestroy(isl);

    /* tell sequential PC its operators */
    PCSetOperators(red->pc,red->pmats[0],red->pmats[0],str);
  } else {
    PCSetOperators(red->pc,pc->mat,pc->pmat,pc->flag);
  }
  PCSetFromOptions(red->pc);
  PCSetUp(red->pc);
  return(0);
}


static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  /* move all values to each processor */
  VecScatterBegin(x,red->b,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);
  VecScatterEnd(x,red->b,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);

  /* apply preconditioner on each processor */
  PCApply(red->pc,red->b,red->x);

  /* move local part of values into y vector */
  VecScatterBegin(red->x,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);
  VecScatterEnd(red->x,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);
  return(0);
}


static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  if (red->scatterin)  {VecScatterDestroy(red->scatterin);}
  if (red->scatterout) {VecScatterDestroy(red->scatterout);}
  if (red->x)          {VecDestroy(red->x);}
  if (red->b)          {VecDestroy(red->b);}
  if (red->pmats) {
    MatDestroyMatrices(1,&red->pmats);
  }
  PCDestroy(red->pc);
  PetscFree(red);
  return(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
{
  /* no options of its own; the inner PC picks up options through its "redundant_" prefix */
  return(0);
}

PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  red->scatterin  = in;
  red->scatterout = out;
  PetscObjectReference((PetscObject)in);
  PetscObjectReference((PetscObject)out);
  return(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode (*f)(PC,VecScatter,VecScatter);

  PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);
  if (f) {
    (*f)(pc,in,out);
  }
  return(0);
}
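
/*
   Usage sketch (mirrors the scatter construction in PCSetUp_Redundant() above;
   xpar is a hypothetical parallel vector conforming to the operator):

     Vec        xseq;
     IS         isl;
     VecScatter in,out;
     PetscInt   mstart,mlocal,m;

     VecGetSize(xpar,&m);
     VecGetLocalSize(xpar,&mlocal);
     VecGetOwnershipRange(xpar,&mstart,PETSC_NULL);
     VecCreateSeq(PETSC_COMM_SELF,m,&xseq);
     VecScatterCreate(xpar,0,xseq,0,&in);       -- gather everything to each process
     ISCreateStride(pc->comm,mlocal,mstart,1,&isl);
     VecScatterCreate(xseq,isl,xpar,isl,&out);  -- return the local piece
     ISDestroy(isl);
     PCRedundantSetScatter(pc,in,out);
*/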

PetscErrorCode PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  *innerpc = red->pc;
  return(0);
}

/*@
   PCRedundantGetPC - Gets the sequential PC created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the sequential PC

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetPC(PC pc,PC *innerpc)
{
  PetscErrorCode (*f)(PC,PC*);

  PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);
  if (f) {
    (*f)(pc,innerpc);
  }
  return(0);
}
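
/*
   Usage sketch (assumes the PETSc 2.x API used in this file): change the
   per-process solver from the default LU set in PCCreate_Redundant():

     PC inner;
     PCRedundantGetPC(pc,&inner);
     PCSetType(inner,PCILU);

   Equivalently, from the command line: -redundant_pc_type ilu
*/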

PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->pmats[0];
  if (pmat) *pmat = red->pmats[0];
  return(0);
}

/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode (*f)(PC,Mat*,Mat*);

  PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);
  if (f) {
    (*f)(pc,mat,pmat);
  }
  return(0);
}
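
/*
   Usage sketch (assumes the PETSc 2.x API used in this file): once PCSetUp()
   has run, each process can inspect its sequential copy of the operator:

     Mat seqmat;
     PCRedundantGetOperators(pc,&seqmat,PETSC_NULL);
     MatView(seqmat,PETSC_VIEWER_STDOUT_SELF);

   Note that, as coded above, red->pmats exists only when useparallelmat is
   true, i.e. when the preconditioner matrix handed to the PC was parallel.
*/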

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a preconditioner for the entire problem on each processor

     Options for the redundant preconditioner can be set with -redundant_pc_xxx

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetPC(), PCRedundantGetOperators()
M*/
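
/*
   Usage sketch (assumes the PETSc 2.x API used in this file): selecting this
   preconditioner from code or from the options database.

     From code:
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCREDUNDANT);

     From the command line:
       -pc_type redundant -redundant_pc_type lu
*/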

PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant   *red;
  char           *prefix;

  PetscNew(PC_Redundant,&red);
  PetscLogObjectMemory(pc,sizeof(PC_Redundant));
  PetscMemzero(red,sizeof(PC_Redundant));
  red->useparallelmat   = PETSC_TRUE;

  /* create the sequential PC that each processor has a copy of */
  PCCreate(PETSC_COMM_SELF,&red->pc);
  PCSetType(red->pc,PCLU);
  PCGetOptionsPrefix(pc,&prefix);
  PCSetOptionsPrefix(red->pc,prefix);
  PCAppendOptionsPrefix(red->pc,"redundant_");

  pc->ops->apply             = PCApply_Redundant;
  pc->ops->applytranspose    = 0;
  pc->ops->setup             = PCSetUp_Redundant;
  pc->ops->destroy           = PCDestroy_Redundant;
  pc->ops->setfromoptions    = PCSetFromOptions_Redundant;
  pc->ops->setuponblocks     = 0;
  pc->ops->view              = PCView_Redundant;
  pc->ops->applyrichardson   = 0;

  pc->data              = (void*)red;

  PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);
  PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);
  PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);

  return(0);
}