Actual source code: redistribute.c

#define PETSCKSP_DLL

/*
  This file defines a preconditioner that redistributes the rows of the matrix
  across the processes for load balance and then solves the redistributed problem.
*/
#include "private/pcimpl.h"
#include "petscksp.h"

typedef struct {
  KSP          ksp;
  Vec          x,b;
  VecScatter   scatter;
  IS           is;
  PetscInt     dcnt,*drows;   /* these are the local rows that have only a diagonal entry */
  PetscScalar  *diag;
  Vec          work;
} PC_Redistribute;
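
/*
   Overview: rows whose only nonzero is the diagonal are inverted directly,
   x_i = b_i / a_ii, using dcnt, drows, and diag; all remaining rows are moved
   to a load-balanced distribution (described by is and scatter) and solved
   with the inner ksp on the subvectors b and x. See PCSetUp_Redistribute()
   and PCApply_Redistribute() below.
*/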

static PetscErrorCode PCView_Redistribute(PC pc,PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;
  PetscTruth      iascii,isstring;
  PetscInt        ncnt,N;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = MPI_Allreduce(&red->dcnt,&ncnt,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    ierr = MatGetSize(pc->pmat,&N,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    Number of rows eliminated %D, percentage of rows eliminated %g\n",ncnt,100.0*((PetscReal)ncnt)/((PetscReal)N));CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Redistribute preconditioner: \n");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redistribute preconditioner");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redistribute",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  MPI_Comm          comm;
  PetscInt          rstart,rend,i,nz,cnt,*rows,ncnt,dcnt,*drows;
  PetscLayout       map,nmap;
  PetscMPIInt       size,rank,imdex,tag,n;
  PetscInt          *source = PETSC_NULL;
  PetscMPIInt       *nprocs = PETSC_NULL,nrecvs;
  PetscInt          j,nsends;
  PetscInt          *owner = PETSC_NULL,*starts = PETSC_NULL,count,slen;
  PetscInt          *rvalues,*svalues,recvtotal;
  PetscMPIInt       *onodes1,*olengths1;
  MPI_Request       *send_waits = PETSC_NULL,*recv_waits = PETSC_NULL;
  MPI_Status        recv_status,*send_status;
  Vec               tvec,diag;
  Mat               tmat;
  const PetscScalar *d;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    ierr = KSPGetOperators(red->ksp,PETSC_NULL,&tmat,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(pc->pmat,red->is,red->is,MAT_REUSE_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  } else {
    PetscInt NN;

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)pc,&tag);CHKERRQ(ierr);

    /* count the non-diagonal rows on this process */
    ierr = MatGetOwnershipRange(pc->mat,&rstart,&rend);CHKERRQ(ierr);
    cnt  = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
      if (nz > 1) cnt++;
      ierr = MatRestoreRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    }
    ierr = PetscMalloc(cnt*sizeof(PetscInt),&rows);CHKERRQ(ierr);
    ierr = PetscMalloc((rend - rstart - cnt)*sizeof(PetscInt),&drows);CHKERRQ(ierr);

    /* list the non-diagonal rows on this process */
    cnt  = 0; dcnt = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
      if (nz > 1) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      ierr = MatRestoreRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    }
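
    /* For example, a boundary-condition row produced by MatZeroRows() has
       nz == 1 and is recorded in drows[] by its local index, while every
       other row is recorded in rows[] by its global index (any row with a
       single entry is treated this way, whatever its origin). */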

    /* create PetscLayout for the non-diagonal rows on each process */
    ierr = PetscLayoutCreate(comm,&map);CHKERRQ(ierr);
    ierr = PetscLayoutSetLocalSize(map,cnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
    rstart = map->rstart;
    rend   = map->rend;

    /* create PetscLayout for the load-balanced non-diagonal rows on each process */
    ierr = PetscLayoutCreate(comm,&nmap);CHKERRQ(ierr);
    ierr = MPI_Allreduce(&cnt,&ncnt,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    ierr = PetscLayoutSetSize(nmap,ncnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(nmap,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(nmap);CHKERRQ(ierr);
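
    /* Illustrative example: with two processes holding cnt = 3 and cnt = 5
       non-diagonal rows, map gets the ranges [0,3) and [3,8) while nmap
       rebalances the same ncnt = 8 rows as [0,4) and [4,8). */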

    ierr = MatGetSize(pc->pmat,&NN,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscInfo2(pc,"Number of diagonal rows eliminated %D, percentage eliminated %g\n",NN-ncnt,100.0*((PetscReal)(NN-ncnt))/((PetscReal)NN));CHKERRQ(ierr);
    /*
        This code is taken from VecScatterCreate_PtoS().
        It determines what rows need to be moved where in order to
        load balance the non-diagonal rows.
    */
    /* count number of contributors to each processor */
    ierr = PetscMalloc2(size,PetscMPIInt,&nprocs,cnt,PetscInt,&owner);CHKERRQ(ierr);
    ierr = PetscMemzero(nprocs,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
    j      = 0;
    nsends = 0;
    for (i=rstart; i<rend; i++) {
      if (i < nmap->range[j]) j = 0;
      for (; j<size; j++) {
        if (i < nmap->range[j+1]) {
          if (!nprocs[j]++) nsends++;
          owner[i-rstart] = j;
          break;
        }
      }
    }
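
    /* Continuing the illustrative example above: the process with map range
       [3,8) sends row 3 to process 0 and rows 4..7 to process 1, so it ends
       up with owner = {0,1,1,1,1}, nprocs = {1,4}, and nsends = 2. */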
    /* inform other processors of number of messages and max length */
    ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,nprocs,&nrecvs);CHKERRQ(ierr);
    ierr = PetscGatherMessageLengths(comm,nsends,nrecvs,nprocs,&onodes1,&olengths1);CHKERRQ(ierr);
    ierr = PetscSortMPIIntWithArray(nrecvs,onodes1,olengths1);CHKERRQ(ierr);
    recvtotal = 0; for (i=0; i<nrecvs; i++) recvtotal += olengths1[i];

    /* post receives:  rvalues - rows I will own; count - nu */
    ierr = PetscMalloc3(recvtotal,PetscInt,&rvalues,nrecvs,PetscInt,&source,nrecvs,MPI_Request,&recv_waits);CHKERRQ(ierr);
    count = 0;
    for (i=0; i<nrecvs; i++) {
      ierr   = MPI_Irecv((rvalues+count),olengths1[i],MPIU_INT,onodes1[i],tag,comm,recv_waits+i);CHKERRQ(ierr);
      count += olengths1[i];
    }

    /* do sends:
       1) starts[i] gives the starting index in svalues for stuff going to
       the ith processor
    */
    ierr = PetscMalloc3(cnt,PetscInt,&svalues,nsends,MPI_Request,&send_waits,size,PetscInt,&starts);CHKERRQ(ierr);
    starts[0] = 0;
    for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[i-1];}
    for (i=0; i<cnt; i++) {
      svalues[starts[owner[i]]++] = rows[i];
    }
    for (i=0; i<cnt; i++) rows[i] = rows[i] - rstart;
    red->drows = drows;
    red->dcnt  = dcnt;
    ierr = PetscFree(rows);CHKERRQ(ierr);

    starts[0] = 0;
    for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[i-1];}
    count = 0;
    for (i=0; i<size; i++) {
      if (nprocs[i]) {
        ierr = MPI_Isend(svalues+starts[i],nprocs[i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
      }
    }

    /* wait on receives */
    count = nrecvs;
    slen  = 0;
    while (count) {
      ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
      /* unpack receives into our local space */
      ierr  = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
      slen += n;
      count--;
    }
    if (slen != recvtotal) SETERRQ2(PETSC_ERR_PLIB,"Total message length %D not as expected %D",slen,recvtotal);

    ierr = ISCreateGeneral(comm,slen,rvalues,&red->is);CHKERRQ(ierr);

    /* free up all work space */
    ierr = PetscFree(olengths1);CHKERRQ(ierr);
    ierr = PetscFree(onodes1);CHKERRQ(ierr);
    ierr = PetscFree3(rvalues,source,recv_waits);CHKERRQ(ierr);
    ierr = PetscFree2(nprocs,owner);CHKERRQ(ierr);
    if (nsends) {   /* wait on sends */
      ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
      ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
      ierr = PetscFree(send_status);CHKERRQ(ierr);
    }
    ierr = PetscFree3(svalues,send_waits,starts);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(nmap);CHKERRQ(ierr);

    ierr = VecCreateMPI(comm,slen,PETSC_DETERMINE,&red->b);CHKERRQ(ierr);
    ierr = VecDuplicate(red->b,&red->x);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&tvec,PETSC_NULL);CHKERRQ(ierr);
    ierr = VecScatterCreate(tvec,red->is,red->b,PETSC_NULL,&red->scatter);CHKERRQ(ierr);
    ierr = VecDestroy(tvec);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(pc->pmat,red->is,red->is,MAT_INITIAL_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = MatDestroy(tmat);CHKERRQ(ierr);
  }

  /* get the diagonal portion of the matrix */
  ierr = PetscMalloc(red->dcnt*sizeof(PetscScalar),&red->diag);CHKERRQ(ierr);
  ierr = MatGetVecs(pc->pmat,&diag,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(pc->pmat,diag);CHKERRQ(ierr);
  ierr = VecGetArray(diag,(PetscScalar**)&d);CHKERRQ(ierr);
  for (i=0; i<red->dcnt; i++) {
    red->diag[i] = 1.0/d[red->drows[i]];
  }
  ierr = VecRestoreArray(diag,(PetscScalar**)&d);CHKERRQ(ierr);
  ierr = VecDestroy(diag);CHKERRQ(ierr);

  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_Redistribute(PC pc,Vec b,Vec x)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  PetscInt          dcnt = red->dcnt,i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork,*diag = red->diag;

  PetscFunctionBegin;
  if (!red->work) {
    ierr = VecDuplicate(b,&red->work);CHKERRQ(ierr);
  }
  /* compute the rows of the solution that have diagonal entries only */
  ierr = VecSet(x,0.0);CHKERRQ(ierr);   /* x = 0; the loop below fills x_D = diag(A)^{-1} b_D */
  ierr = VecGetArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecGetArray(b,(PetscScalar**)&bwork);CHKERRQ(ierr);
  for (i=0; i<dcnt; i++) {
    xwork[drows[i]] = diag[i]*bwork[drows[i]];
  }
  ierr = PetscLogFlops(dcnt);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecRestoreArray(b,(PetscScalar**)&bwork);CHKERRQ(ierr);
  /* update the right hand side for the reduced system with the diagonal rows (and corresponding columns) removed */
  ierr = MatMult(pc->pmat,x,red->work);CHKERRQ(ierr);
  ierr = VecAYPX(red->work,-1.0,b);CHKERRQ(ierr);   /* red->work = b - A x */
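  /* At this point x contains only the diagonal-row entries x_D = D^{-1} b_D,
     and the diagonal-only rows of A have no off-diagonal entries, so the
     entries of red->work selected by red->is form the reduced right hand side
     b_I - A_ID x_D; the KSP below thus solves A_II x_I = b_I - A_ID x_D,
     which together with x_D solves the full system. */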

  ierr = VecScatterBegin(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = KSPSolve(red->ksp,red->b,red->x);CHKERRQ(ierr);
  ierr = VecScatterBegin(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  if (red->scatter) {ierr = VecScatterDestroy(red->scatter);CHKERRQ(ierr);}
  if (red->is)      {ierr = ISDestroy(red->is);CHKERRQ(ierr);}
  if (red->b)       {ierr = VecDestroy(red->b);CHKERRQ(ierr);}
  if (red->x)       {ierr = VecDestroy(red->x);CHKERRQ(ierr);}
  if (red->ksp)     {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  if (red->work)    {ierr = VecDestroy(red->work);CHKERRQ(ierr);}
  ierr = PetscFree(red->drows);CHKERRQ(ierr);
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscFree(red);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows that have only a diagonal entry, and then applies a KSP to that reduced matrix

     Options for the inner solve can be set with -redistribute_ksp_xxx <values> and -redistribute_pc_xxx <values>

     Notes: Usually run this with -ksp_type preonly

     If you have used MatZeroRows() to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, -ksp_type preonly
     -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc to take advantage of the symmetry.

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types)
M*/
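
/*
   Minimal usage sketch (A, b, and x stand for an already assembled Mat and
   Vecs; they are illustrative and not defined in this file; run-time options
   such as -redistribute_ksp_type cg are picked up by KSPSetFromOptions()):

      KSP ksp;
      PC  pc;
      KSPCreate(PETSC_COMM_WORLD,&ksp);
      KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);
      KSPSetType(ksp,KSPPREONLY);
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCREDISTRIBUTE);
      KSPSetFromOptions(ksp);
      KSPSolve(ksp,b,x);
      KSPDestroy(ksp);
*/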

PetscErrorCode  PCCreate_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redistribute,&red);CHKERRQ(ierr);
  pc->data = (void*)red;

  pc->ops->apply          = PCApply_Redistribute;
  pc->ops->applytranspose = 0;
  pc->ops->setup          = PCSetUp_Redistribute;
  pc->ops->destroy        = PCDestroy_Redistribute;
  pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
  pc->ops->view           = PCView_Redistribute;

  ierr = KSPCreate(((PetscObject)pc)->comm,&red->ksp);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(red->ksp,"redistribute_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}