Actual source code: mis.c

petsc-3.6.1 2015-08-06
  1: #include <petsc/private/matimpl.h>    /*I "petscmat.h" I*/
  2: #include <../src/mat/impls/aij/seq/aij.h>
  3: #include <../src/mat/impls/aij/mpi/mpiaij.h>
  4: #include <petscsf.h>

  6: #define MIS_NOT_DONE -2
  7: #define MIS_DELETED  -1
  8: #define MIS_REMOVED  -3
  9: #define MIS_IS_SELECTED(s) (s!=MIS_DELETED && s!=MIS_NOT_DONE && s!=MIS_REMOVED)
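     /* Descriptive note (added): a vertex state is either one of the negative sentinels above or,
        once the vertex is selected, its own global index (lid+my0 >= 0); MIS_IS_SELECTED() simply
        checks that the state is none of the sentinels. */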

 11: /* -------------------------------------------------------------------------- */
 12: /*
 13:    maxIndSetAgg - parallel maximal independent set (MIS) with data locality info. MatAIJ specific!!!

 15:    Input Parameters:
 16:    . perm - serial permutation of local rows giving the order in which they are processed in the MIS
 17:    . Gmat - global matrix of the graph (numerical values are not used)
 18:    . strict_aggs - flag for whether to keep strict (non-overlapping) aggregates in 'llist'

 20:    Output Parameter:
 21:    . a_locals_llist - array of lists of nodes rooted at the selected nodes; for !strict_aggs the lists
 22:      also hold 'ghost' nodes at the end, with natural local indices
 23: */
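     /* Outline (added, synchronous greedy MIS): iterate until every vertex on every process is done:
        (1) each not-done local vertex with no not-done ghost neighbor on a higher process is either
            removed (isolated singleton) or selected, and its not-done local neighbors are deleted
            into its aggregate;
        (2) ghost states are exchanged with a PetscSF broadcast, and boundary vertices adjacent to a
            remotely selected vertex are deleted;
        (3) an allreduce checks whether any vertex anywhere is still not done. */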
 26: PetscErrorCode maxIndSetAgg(IS perm,Mat Gmat,PetscBool strict_aggs,PetscCoarsenData **a_locals_llist)
 27: {
 28:   PetscErrorCode   ierr;
 29:   Mat_SeqAIJ       *matA,*matB=NULL;
 30:   Mat_MPIAIJ       *mpimat=NULL;
 31:   MPI_Comm         comm;
 32:   PetscInt         num_fine_ghosts,kk,n,ix,j,*idx,*ii,iter,Iend,my0,nremoved,gid,lid,cpid,lidj,sgid,t1,t2,slid,nDone,nselected=0,state,statej;
 33:   PetscInt         *cpcol_gid,*cpcol_state,*lid_cprowID,*lid_gid,*cpcol_sel_gid,*icpcol_gid,*lid_state,*lid_parent_gid=NULL;
 34:   PetscBool        *lid_removed;
 35:   PetscBool        isMPI,isAIJ,isOK;
 36:   const PetscInt   *perm_ix;
 37:   const PetscInt   nloc = Gmat->rmap->n;
 38:   PetscCoarsenData *agg_lists;
 39:   PetscLayout      layout;
 40:   PetscSF          sf;

 43:   PetscObjectGetComm((PetscObject)Gmat,&comm);

 45:   /* get submatrices */
 46:   PetscObjectTypeCompare((PetscObject)Gmat,MATMPIAIJ,&isMPI);
 47:   if (isMPI) {
 48:     mpimat = (Mat_MPIAIJ*)Gmat->data;
 49:     matA   = (Mat_SeqAIJ*)mpimat->A->data;
 50:     matB   = (Mat_SeqAIJ*)mpimat->B->data;
 51:     /* force compressed storage of B */
 52:     MatCheckCompressedRow(mpimat->B,matB->nonzerorowcnt,&matB->compressedrow,matB->i,Gmat->rmap->n,-1.0);
 53:   } else {
 54:     PetscObjectTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isAIJ);
 55:     matA = (Mat_SeqAIJ*)Gmat->data;
 56:   }
 57:   MatGetOwnershipRange(Gmat,&my0,&Iend);
 58:   PetscMalloc1(nloc,&lid_gid); /* explicit array needed */
 59:   if (mpimat) {
 60:     for (kk=0,gid=my0; kk<nloc; kk++,gid++) {
 61:       lid_gid[kk] = gid;
 62:     }
 63:     VecGetLocalSize(mpimat->lvec, &num_fine_ghosts);
 64:     PetscMalloc1(num_fine_ghosts,&cpcol_gid);
 65:     PetscMalloc1(num_fine_ghosts,&cpcol_state);
 66:     PetscSFCreate(PetscObjectComm((PetscObject)Gmat),&sf);
 67:     MatGetLayouts(Gmat,&layout,NULL);
 68:     PetscSFSetGraphLayout(sf,layout,num_fine_ghosts,NULL,PETSC_COPY_VALUES,mpimat->garray);
 69:     PetscSFBcastBegin(sf,MPIU_INT,lid_gid,cpcol_gid);
 70:     PetscSFBcastEnd(sf,MPIU_INT,lid_gid,cpcol_gid);
 71:     for (kk=0;kk<num_fine_ghosts;kk++) {
 72:       cpcol_state[kk]=MIS_NOT_DONE;
 73:     }
 74:   } else num_fine_ghosts = 0;
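       /* Note (added): in the MPI case, cpcol_gid[] now holds the global vertex index of each ghost
          column of B (filled by the broadcast of lid_gid above), and cpcol_state[] caches that
          vertex's MIS state (refreshed by the broadcasts inside the loop below). */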

 76:   PetscMalloc1(nloc, &lid_cprowID);
 77:   PetscMalloc1(nloc, &lid_removed); /* explicit array needed */
 78:   if (strict_aggs) {
 79:     PetscMalloc1(nloc,&lid_parent_gid);
 80:   }
 81:   PetscMalloc1(nloc,&lid_state);

 83:   /* for !strict_aggs the aggregate lists also hold ghost nodes and use local indexing */
 84:   PetscCDCreate(strict_aggs ? nloc : num_fine_ghosts+nloc, &agg_lists);
 85:   if (a_locals_llist) *a_locals_llist = agg_lists;

 87:   /* need an inverse map - locals */
 88:   for (kk=0; kk<nloc; kk++) {
 89:     lid_cprowID[kk] = -1; lid_removed[kk] = PETSC_FALSE;
 90:     if (strict_aggs) {
 91:       lid_parent_gid[kk] = -1;
 92:     }
 93:     lid_state[kk] = MIS_NOT_DONE;
 94:   }
 95:   /* set index into compressed row 'lid_cprowID': map each local row with off-process neighbors to its slot in matB's compressed-row structure */
 96:   if (matB) {
 97:     for (ix=0; ix<matB->compressedrow.nrows; ix++) {
 98:       lid = matB->compressedrow.rindex[ix];
 99:       lid_cprowID[lid] = ix;
100:     }
101:   }
102:   /* MIS */
103:   iter = nremoved = nDone = 0;
104:   ISGetIndices(perm, &perm_ix);
105:   while (PETSC_TRUE) { /* synchronous algorithm (asynchronous version not implemented); exits via the breaks below */
106:     iter++;
107:     /* check all vertices */
108:     for (kk=0; kk<nloc; kk++) {
109:       lid   = perm_ix[kk];
110:       state = lid_state[lid];
111:       if (lid_removed[lid]) continue;
112:       if (state == MIS_NOT_DONE) {
113:         /* parallel test, delete if selected ghost */
114:         isOK = PETSC_TRUE;
115:         if ((ix=lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
116:           ii  = matB->compressedrow.i; n = ii[ix+1] - ii[ix];
117:           idx = matB->j + ii[ix];
118:           for (j=0; j<n; j++) {
119:             cpid   = idx[j]; /* compressed row ID in B mat */
120:             gid    = cpcol_gid[cpid];
121:             statej = cpcol_state[cpid];
122:             if (statej == MIS_NOT_DONE && gid >= Iend) { /* an undone neighbor on a higher process; the test should be (pe > rank), gid >= Iend is used as a proxy */
123:               isOK = PETSC_FALSE; /* cannot act on this vertex yet */
124:               break;
125:             }
126:           }
127:         } /* parallel test */
128:         if (isOK) { /* select or remove this vertex */
129:           nDone++;
130:           /* check for singleton */
131:           ii = matA->i; n = ii[lid+1] - ii[lid];
132:           if (n < 2) {
133:             /* if I have any ghost adjacency then this is not a singleton */
134:             ix = lid_cprowID[lid];
135:             if (ix==-1 || (matB->compressedrow.i[ix+1]-matB->compressedrow.i[ix])==0) {
136:               nremoved++;
137:               lid_removed[lid] = PETSC_TRUE;
138:               /* technically this vertex is in the MIS, but we choose not to select it */
139:               continue; /* only one local adjacency (itself) and no ghosts - a singleton */
140:             }
141:           }
142:           /* SELECTED state encoded with global index */
143:           lid_state[lid] = lid+my0; /* needed???? */
144:           nselected++;
145:           if (strict_aggs) {
146:             PetscCDAppendID(agg_lists, lid, lid+my0);
147:           } else {
148:             PetscCDAppendID(agg_lists, lid, lid);
149:           }
150:           /* delete local adj */
151:           idx = matA->j + ii[lid];
152:           for (j=0; j<n; j++) {
153:             lidj   = idx[j];
154:             statej = lid_state[lidj];
155:             if (statej == MIS_NOT_DONE) {
156:               nDone++;
157:               if (strict_aggs) {
158:                 PetscCDAppendID(agg_lists, lid, lidj+my0);
159:               } else {
160:                 PetscCDAppendID(agg_lists, lid, lidj);
161:               }
162:               lid_state[lidj] = MIS_DELETED;  /* delete this */
163:             }
164:           }
165:           /* delete ghost adjacencies of lid - deleted ghosts are handled later for strict_aggs */
166:           if (!strict_aggs) {
167:             if ((ix=lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
168:               ii  = matB->compressedrow.i; n = ii[ix+1] - ii[ix];
169:               idx = matB->j + ii[ix];
170:               for (j=0; j<n; j++) {
171:                 cpid   = idx[j]; /* compressed row ID in B mat */
172:                 statej = cpcol_state[cpid];
173:                 if (statej == MIS_NOT_DONE) {
174:                   PetscCDAppendID(agg_lists, lid, nloc+cpid);
175:                 }
176:               }
177:             }
178:           }
179:         } /* selected */
180:       } /* not done vertex */
181:     } /* vertex loop */

183:     /* update ghost states and count todos */
184:     if (mpimat) {
185:       /* scatter states, check for done */
186:       PetscSFBcastBegin(sf,MPIU_INT,lid_state,cpcol_state);
187:       PetscSFBcastEnd(sf,MPIU_INT,lid_state,cpcol_state);
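           /* cpcol_state[] now mirrors the owning process's lid_state[] for every ghost column */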
188:       ii   = matB->compressedrow.i;
189:       for (ix=0; ix<matB->compressedrow.nrows; ix++) {
190:         lid   = matB->compressedrow.rindex[ix]; /* local boundary node */
191:         state = lid_state[lid];
192:         if (state == MIS_NOT_DONE) {
193:           /* look at ghosts */
194:           n   = ii[ix+1] - ii[ix];
195:           idx = matB->j + ii[ix];
196:           for (j=0; j<n; j++) {
197:             cpid   = idx[j]; /* compressed row ID in B mat */
198:             statej = cpcol_state[cpid];
199:             if (MIS_IS_SELECTED(statej)) { /* a ghost neighbor was selected, so lid is now deleted */
200:               nDone++;
201:               lid_state[lid] = MIS_DELETED; /* delete this */
202:               if (!strict_aggs) {
203:                 lidj = nloc + cpid;
204:                 PetscCDAppendID(agg_lists, lidj, lid);
205:               } else {
206:                 sgid = cpcol_gid[cpid];
207:                 lid_parent_gid[lid] = sgid; /* keep track of proc that I belong to */
208:               }
209:               break;
210:             }
211:           }
212:         }
213:       }
214:       /* all done? */
215:       t1   = nloc - nDone;
216:       MPI_Allreduce(&t1, &t2, 1, MPIU_INT, MPI_SUM, comm); /* synchronous version */
217:       if (t2 == 0) break;
218:     } else break; /* all done */
219:   } /* outer parallel MIS loop */
220:   ISRestoreIndices(perm,&perm_ix);
221:   PetscInfo3(Gmat,"\t removed %D of %D vertices.  %D selected.\n",nremoved,nloc,nselected);

223:   /* tell neighbor processes which selected vertex each of my deleted boundary vertices belongs to (lid_parent_gid) - fill in the owners' agg_lists ghost entries */
224:   if (strict_aggs && matB) {
225:     /* need to copy this into a free buffer -- should do this globally */
226:     PetscMalloc1(num_fine_ghosts, &cpcol_sel_gid);
227:     PetscMalloc1(num_fine_ghosts, &icpcol_gid);
228:     for (cpid=0; cpid<num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid];

230:     /* get proc of deleted ghost */
231:     PetscSFBcastBegin(sf,MPIU_INT,lid_parent_gid,cpcol_sel_gid);
232:     PetscSFBcastEnd(sf,MPIU_INT,lid_parent_gid,cpcol_sel_gid);
233:     for (cpid=0; cpid<num_fine_ghosts; cpid++) {
234:       sgid = cpcol_sel_gid[cpid];
235:       gid  = icpcol_gid[cpid];
236:       if (sgid >= my0 && sgid < Iend) { /* I own the selected vertex this deleted ghost was assigned to */
237:         slid = sgid - my0;
238:         PetscCDAppendID(agg_lists, slid, gid);
239:       }
240:     }
241:     PetscFree(icpcol_gid);
242:     PetscFree(cpcol_sel_gid);
243:   }
244:   if (mpimat) {
245:     PetscSFDestroy(&sf);
246:     PetscFree(cpcol_gid);
247:     PetscFree(cpcol_state);
248:   }
249:   PetscFree(lid_cprowID);
250:   PetscFree(lid_gid);
251:   PetscFree(lid_removed);
252:   if (strict_aggs) {
253:     PetscFree(lid_parent_gid);
254:   }
255:   PetscFree(lid_state);
256:   return(0);
257: }

259: typedef struct {
260:   int dummy;
261: } MatCoarsen_MIS;
262: /*
263:    MIS coarsen, simple greedy.
264: */
267: static PetscErrorCode MatCoarsenApply_MIS(MatCoarsen coarse)
268: {
269:   /* MatCoarsen_MIS *MIS = (MatCoarsen_MIS*)coarse->subctx; */
271:   Mat            mat = coarse->graph;

275:   if (!coarse->perm) {
276:     IS       perm;
277:     PetscInt n,m;
278:     MPI_Comm comm;
279:     PetscObjectGetComm((PetscObject)mat,&comm);
280:     MatGetLocalSize(mat, &m, &n);
281:     ISCreateStride(comm, m, 0, 1, &perm);
282:     maxIndSetAgg(perm, mat, coarse->strict_aggs, &coarse->agg_lists);
283:     ISDestroy(&perm);
284:   } else {
285:     maxIndSetAgg(coarse->perm, mat, coarse->strict_aggs,  &coarse->agg_lists);
286:   }
287:   return(0);
288: }

292: PetscErrorCode MatCoarsenView_MIS(MatCoarsen coarse,PetscViewer viewer)
293: {
294:   /* MatCoarsen_MIS *MIS = (MatCoarsen_MIS*)coarse->subctx; */
296:   PetscMPIInt    rank;
297:   PetscBool      iascii;

301:   MPI_Comm_rank(PetscObjectComm((PetscObject)coarse),&rank);
302:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
303:   if (iascii) {
304:     PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] MIS aggregator\n",rank);
305:     PetscViewerFlush(viewer);
306:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
307:   }
308:   return(0);
309: }

313: PetscErrorCode MatCoarsenDestroy_MIS(MatCoarsen coarse)
314: {
315:   MatCoarsen_MIS *MIS = (MatCoarsen_MIS*)coarse->subctx;

320:   PetscFree(MIS);
321:   return(0);
322: }

324: /*MC
325:    MATCOARSENMIS - Creates a coarsen context that uses a simple greedy maximal independent set (MIS) algorithm.

327:    Collective on MPI_Comm

329:    Input Parameter:
330: .  coarse - the coarsen context

332:    Options Database Keys:
333: +  -mat_coarsen_MIS_xxx -

335:    Level: beginner

337: .keywords: Coarsen, create, context

339: .seealso: MatCoarsenSetType(), MatCoarsenType

341: M*/
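     /* A minimal usage sketch (an illustration added to this listing, not part of the original
        source): driving the MIS coarsener through the public MatCoarsen interface, roughly as
        PCGAMG does.  Error checking (CHKERRQ) is omitted and the helper name
        ExampleCoarsenWithMIS is hypothetical. */
     static PetscErrorCode ExampleCoarsenWithMIS(Mat graph,PetscCoarsenData **agg_lists)
     {
       MatCoarsen crs;

       MatCoarsenCreate(PetscObjectComm((PetscObject)graph),&crs);
       MatCoarsenSetType(crs,MATCOARSENMIS);      /* selects MatCoarsenApply_MIS above */
       MatCoarsenSetAdjacency(crs,graph);         /* graph whose vertices are to be aggregated */
       MatCoarsenSetStrictAggs(crs,PETSC_TRUE);   /* ask for non-overlapping aggregates */
       MatCoarsenApply(crs);
       MatCoarsenGetData(crs,agg_lists);          /* caller takes over the aggregate lists */
       MatCoarsenDestroy(&crs);
       return(0);
     }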

345: PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MIS(MatCoarsen coarse)
346: {
348:   MatCoarsen_MIS *MIS;

351:   PetscNewLog(coarse,&MIS);
352:   coarse->subctx = (void*)MIS;

354:   coarse->ops->apply   = MatCoarsenApply_MIS;
355:   coarse->ops->view    = MatCoarsenView_MIS;
356:   coarse->ops->destroy = MatCoarsenDestroy_MIS;
357:   /* coarse->ops->setfromoptions = MatCoarsenSetFromOptions_MIS; */
358:   return(0);
359: }