Actual source code: hpc.c

petsc-3.4.5 2014-06-29
#include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscksp.h>

typedef struct {
  MatStructure flag;                 /* pc->flag */
  PetscInt     setupcalled;          /* pc->setupcalled */
  PetscInt     n;
  MPI_Comm     comm;                 /* local world used by this preconditioner */
  KSP          ksp;                  /* actual solver used across local world */
  Mat          mat;                  /* matrix in local world */
  Mat          gmat;                 /* matrix known only to process 0 in the local world */
  Vec          x,y,xdummy,ydummy;
  VecScatter   scatter;
  PetscBool    nonzero_guess;
} PC_HMPI;


/*
    We would like to have this simply call PCView() on the inner PC. The problem is
  that the outer communicator does not live inside the local world, so we cannot do that.
  Instead we handle the special case where the viewer is stdout by constructing a new
  viewer just for this call.
*/

static PetscErrorCode PCView_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI     *red = (PC_HMPI*)ctx;
  PetscViewer viewer;

  PetscViewerASCIIGetStdout(comm,&viewer);
  PetscViewerASCIIPushTab(viewer);         /* this is bogus in general */
  KSPView(red->ksp,viewer);
  PetscViewerASCIIPopTab(viewer);
  return(0);
}

static PetscErrorCode PCView_HMPI(PC pc,PetscViewer viewer)
{
  PC_HMPI     *red = (PC_HMPI*)pc->data;
  PetscMPIInt size;
  PetscBool   iascii;

  MPI_Comm_size(red->comm,&size);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer,"  Size of solver nodes %d\n",size);
    PetscViewerASCIIPrintf(viewer,"  Parallel sub-solver given next\n");
    /* should only make the next call if the viewer is associated with stdout */
    PetscHMPIRun(red->comm,PCView_HMPI_MP,red);
  }
  return(0);
}

PETSC_EXTERN PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);

static PetscErrorCode PCApply_HMPI_1(PC pc,Vec x,Vec y)
{
  PC_HMPI *red = (PC_HMPI*)pc->data;

  KSPSetInitialGuessNonzero(red->ksp,pc->nonzero_guess);
  KSPSolve(red->ksp,x,y);
  return(0);
}

static PetscErrorCode PCSetUp_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI     *red = (PC_HMPI*)ctx;
  PetscInt    m;
  MatReuse    scal;
  PetscMPIInt rank;

  red->comm = comm;

  MPI_Bcast(&red->setupcalled,1,MPIU_INT,0,comm);
  MPI_Bcast((PetscEnum*)&red->flag,1,MPIU_ENUM,0,comm);
  if (!red->setupcalled) {
    /* setup vector communication */
    MPI_Bcast(&red->n,1,MPIU_INT,0,comm);
    VecCreateMPI(comm,PETSC_DECIDE,red->n,&red->x);
    VecCreateMPI(comm,PETSC_DECIDE,red->n,&red->y);
    VecScatterCreateToZero(red->x,&red->scatter,&red->xdummy);
    VecDuplicate(red->xdummy,&red->ydummy);
    MPI_Comm_rank(comm,&rank);
    if (!rank) {
      VecDestroy(&red->xdummy);
      VecDestroy(&red->ydummy);
    }
    scal = MAT_INITIAL_MATRIX;
  } else {
    if (red->flag == DIFFERENT_NONZERO_PATTERN) {
      MatDestroy(&red->mat);
      scal = MAT_INITIAL_MATRIX;
    } else {
      scal = MAT_REUSE_MATRIX;
    }
  }

  /* copy matrix out onto processes */
  VecGetLocalSize(red->x,&m);
  MatDistribute_MPIAIJ(comm,red->gmat,m,scal,&red->mat);
  if (!red->setupcalled) {
    /* create the solver */
    KSPCreate(comm,&red->ksp);
    /* would like to set proper tablevel for KSP, but do not have direct access to parent pc */
    KSPSetOptionsPrefix(red->ksp,"hmpi_"); /* should actually append with global pc prefix */
    KSPSetOperators(red->ksp,red->mat,red->mat,red->flag);
    KSPSetFromOptions(red->ksp);
  } else {
    KSPSetOperators(red->ksp,red->mat,red->mat,red->flag);
  }
  return(0);
}

static PetscErrorCode PCSetUp_HMPI(PC pc)
{
  PC_HMPI     *red = (PC_HMPI*)pc->data;
  PetscMPIInt size;

  red->gmat        = pc->mat;
  red->flag        = pc->flag;
  red->setupcalled = pc->setupcalled;

  MPI_Comm_size(red->comm,&size);
  if (size == 1) {  /* special case where copy of matrix is not needed */
    if (!red->setupcalled) {
      /* create the solver */
      KSPCreate(PetscObjectComm((PetscObject)pc),&red->ksp);
      PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
      KSPSetOptionsPrefix(red->ksp,"hmpi_"); /* should actually append with global pc prefix */
      KSPSetOperators(red->ksp,red->gmat,red->gmat,red->flag);
      KSPSetFromOptions(red->ksp);
    } else {
      KSPSetOperators(red->ksp,red->gmat,red->gmat,red->flag);
    }
    pc->ops->apply = PCApply_HMPI_1;
    return(0);
  } else {
    MatGetSize(pc->mat,&red->n,PETSC_IGNORE);
    PetscHMPIRun(red->comm,PCSetUp_HMPI_MP,red);
  }
  return(0);
}

static PetscErrorCode PCApply_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI *red = (PC_HMPI*)ctx;

  VecScatterBegin(red->scatter,red->xdummy,red->x,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(red->scatter,red->xdummy,red->x,INSERT_VALUES,SCATTER_REVERSE);
  MPI_Bcast(&red->nonzero_guess,1,MPIU_BOOL,0,red->comm);
  if (red->nonzero_guess) {
    VecScatterBegin(red->scatter,red->ydummy,red->y,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterEnd(red->scatter,red->ydummy,red->y,INSERT_VALUES,SCATTER_REVERSE);
  }
  KSPSetInitialGuessNonzero(red->ksp,red->nonzero_guess);

  KSPSolve(red->ksp,red->x,red->y);

  VecScatterBegin(red->scatter,red->y,red->ydummy,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatter,red->y,red->ydummy,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}

static PetscErrorCode PCApply_HMPI(PC pc,Vec x,Vec y)
{
  PC_HMPI *red = (PC_HMPI*)pc->data;

  red->xdummy        = x;
  red->ydummy        = y;
  red->nonzero_guess = pc->nonzero_guess;

  PetscHMPIRun(red->comm,PCApply_HMPI_MP,red);
  return(0);
}

static PetscErrorCode PCDestroy_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI     *red = (PC_HMPI*)ctx;
  PetscMPIInt rank;

  VecScatterDestroy(&red->scatter);
  VecDestroy(&red->x);
  VecDestroy(&red->y);
  KSPDestroy(&red->ksp);
  MatDestroy(&red->mat);
  MPI_Comm_rank(comm,&rank);
  if (rank) {
    VecDestroy(&red->xdummy);
    VecDestroy(&red->ydummy);
  }
  return(0);
}

static PetscErrorCode PCDestroy_HMPI(PC pc)
{
  PC_HMPI *red = (PC_HMPI*)pc->data;

  PetscHMPIRun(red->comm,PCDestroy_HMPI_MP,red);
  PetscHMPIFree(red->comm,red);
  return(0);
}

static PetscErrorCode PCSetFromOptions_HMPI(PC pc)
{
  return(0);
}


/* -------------------------------------------------------------------------------------*/
/*MC
     PCHMPI - Runs a preconditioner for a single-process matrix across several MPI processes

$     This will usually be run with -pc_type hmpi -ksp_type preonly
$     Solver options are set with -hmpi_ksp_... and -hmpi_pc_...; for example,
$     -hmpi_ksp_type cg would use CG as the Krylov method, or -hmpi_ksp_monitor or
$     -hmpi_pc_type hypre -hmpi_pc_hypre_type boomeramg

       Always run with -ksp_view (or -snes_view) to see what solver is actually being used.

       Currently the solver options INSIDE the HMPI preconditioner can ONLY be set via the
      options database.

   Level: intermediate

   See PetscHMPIMerge() and PetscHMPISpawn() for two ways to start up MPI for use with this preconditioner.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types)

M*/
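/*
   Editor's sketch (not part of the original hpc.c): a minimal, hypothetical example of
   driving PCHMPI from C rather than purely from the options database. It assumes the
   local world has already been created with PetscHMPIMerge() or PetscHMPISpawn(), and
   that a sequential Mat A and Vecs b,x already exist on the master process; the names
   SolveWithHMPI, A, b and x are illustrative only.
*/
static PetscErrorCode SolveWithHMPI(Mat A,Vec b,Vec x)
{
  KSP ksp;
  PC  pc;

  KSPCreate(PETSC_COMM_SELF,&ksp);              /* sequential outer solve, as PCHMPI requires */
  KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);
  KSPSetType(ksp,KSPPREONLY);                   /* outer solve is a single preconditioner application */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCHMPI);                         /* inner solver configured with -hmpi_ksp_... and -hmpi_pc_... */
  KSPSetFromOptions(ksp);
  KSPSolve(ksp,b,x);
  KSPDestroy(&ksp);
  return(0);
}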

PETSC_EXTERN PetscErrorCode PCCreate_HMPI(PC pc)
{
  PC_HMPI     *red;
  PetscMPIInt size;

  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
  if (size > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_SIZ,"HMPI preconditioner only works for sequential solves");
  if (!PETSC_COMM_LOCAL_WORLD) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PETSc not initialized for PCHMPI; see the manual pages for PetscHMPISpawn() and PetscHMPIMerge()");
  /* cast the struct length to a PetscInt for easier MPI calls */

  PetscHMPIMalloc(PETSC_COMM_LOCAL_WORLD,(PetscInt)sizeof(PC_HMPI),(void**)&red);
  red->comm = PETSC_COMM_LOCAL_WORLD;
  pc->data  = (void*) red;

  pc->ops->apply          = PCApply_HMPI;
  pc->ops->destroy        = PCDestroy_HMPI;
  pc->ops->setfromoptions = PCSetFromOptions_HMPI;
  pc->ops->setup          = PCSetUp_HMPI;
  pc->ops->view           = PCView_HMPI;
  return(0);
}