Actual source code: mis.c

#include <petsc/private/matimpl.h>
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscsf.h>

#define MIS_NOT_DONE       -2
#define MIS_DELETED        -1
#define MIS_REMOVED        -3
#define MIS_IS_SELECTED(s) ((s) != MIS_DELETED && (s) != MIS_NOT_DONE && (s) != MIS_REMOVED)
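
/* A vertex state is either one of the negative sentinels above or, once the vertex is
   selected, its (nonnegative) global index; MIS_IS_SELECTED() is therefore true exactly
   for the nonnegative, global-index-encoded states. */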

/*
   MatCoarsenApply_MIS_private - parallel maximal independent set (MIS) with data locality info. MatAIJ specific!

   Input Parameters:
.  perm - serial permutation of local rows to process in the MIS
.  Gmat - global matrix of the graph (numerical values need not be defined)
.  strict_aggs - flag for whether to keep strict (non-overlapping) aggregates in 'llist'

   Output Parameter:
.  a_locals_llist - array of lists of nodes rooted at selected nodes; for !strict_aggs it also includes 'ghost' nodes at the end, addressed with local indices

   Note:
   Vertices are visited greedily in 'perm' order; a vertex defers to undone ghost neighbors with larger global index (a proxy for larger owner rank), which guarantees the synchronous iteration terminates.
*/
static PetscErrorCode MatCoarsenApply_MIS_private(IS perm, Mat Gmat, PetscBool strict_aggs, PetscCoarsenData **a_locals_llist)
{
  Mat_SeqAIJ       *matA, *matB = NULL;
  Mat_MPIAIJ       *mpimat = NULL;
  MPI_Comm          comm;
  PetscInt          num_fine_ghosts, kk, n, ix, j, *idx, *ii, Iend, my0, nremoved, gid, lid, cpid, lidj, sgid, t1, t2, slid, nDone, nselected = 0, state, statej;
  PetscInt         *cpcol_gid, *cpcol_state, *lid_cprowID, *lid_gid, *cpcol_sel_gid, *icpcol_gid, *lid_state, *lid_parent_gid = NULL;
  PetscBool        *lid_removed;
  PetscBool         isMPI, isAIJ, isOK;
  const PetscInt   *perm_ix;
  const PetscInt    nloc = Gmat->rmap->n;
  PetscCoarsenData *agg_lists;
  PetscLayout       layout;
  PetscSF           sf;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Gmat, &comm));

  /* get submatrices */
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPI));
  if (isMPI) {
    mpimat = (Mat_MPIAIJ *)Gmat->data;
    matA   = (Mat_SeqAIJ *)mpimat->A->data;
    matB   = (Mat_SeqAIJ *)mpimat->B->data;
    /* force compressed storage of B */
    PetscCall(MatCheckCompressedRow(mpimat->B, matB->nonzerorowcnt, &matB->compressedrow, matB->i, Gmat->rmap->n, -1.0));
  } else {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)Gmat, MATSEQAIJ, &isAIJ));
    PetscCheck(isAIJ, PETSC_COMM_SELF, PETSC_ERR_USER, "Require AIJ matrix.");
    matA = (Mat_SeqAIJ *)Gmat->data;
  }
  PetscCall(MatGetOwnershipRange(Gmat, &my0, &Iend));
  PetscCall(PetscMalloc1(nloc, &lid_gid)); /* explicit array needed */
  if (mpimat) {
    for (kk = 0, gid = my0; kk < nloc; kk++, gid++) lid_gid[kk] = gid;
    PetscCall(VecGetLocalSize(mpimat->lvec, &num_fine_ghosts));
    PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_gid));
    PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_state));
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)Gmat), &sf));
    PetscCall(MatGetLayouts(Gmat, &layout, NULL));
    PetscCall(PetscSFSetGraphLayout(sf, layout, num_fine_ghosts, NULL, PETSC_COPY_VALUES, mpimat->garray));
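    /* the star forest maps each off-process (ghost) column listed in garray back to its
       owning row, so each Bcast below pushes owner-side values out to the ghost copies */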
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_gid, cpcol_gid, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_gid, cpcol_gid, MPI_REPLACE));
    for (kk = 0; kk < num_fine_ghosts; kk++) cpcol_state[kk] = MIS_NOT_DONE;
  } else num_fine_ghosts = 0;

  PetscCall(PetscMalloc1(nloc, &lid_cprowID));
  PetscCall(PetscMalloc1(nloc, &lid_removed)); /* explicit array needed */
  if (strict_aggs) PetscCall(PetscMalloc1(nloc, &lid_parent_gid));
  PetscCall(PetscMalloc1(nloc, &lid_state));

  /* for !strict_aggs the list also holds ghost nodes, addressed with local indices */
  PetscCall(PetscCDCreate(strict_aggs ? nloc : num_fine_ghosts + nloc, &agg_lists));
  if (a_locals_llist) *a_locals_llist = agg_lists;

  /* initialize local state; the inverse map lid_cprowID is filled below */
  for (kk = 0; kk < nloc; kk++) {
    lid_cprowID[kk] = -1;
    lid_removed[kk] = PETSC_FALSE;
    if (strict_aggs) lid_parent_gid[kk] = -1;
    lid_state[kk] = MIS_NOT_DONE;
  }
  /* set index into compressed row 'lid_cprowID' */
  if (matB) {
    for (ix = 0; ix < matB->compressedrow.nrows; ix++) {
      lid = matB->compressedrow.rindex[ix];
      if (lid >= 0) lid_cprowID[lid] = ix;
    }
  }
  /* MIS */
  nremoved = nDone = 0;
  PetscCall(ISGetIndices(perm, &perm_ix));
  while (nDone < nloc || PETSC_TRUE) { /* condition is always true; an asynchronous version is not implemented, exit is via the breaks below */
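    /* one synchronous round: select or remove whatever the current ghost states allow
       locally, then refresh the ghost states and count the remaining undone vertices */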
    /* check all vertices */
    for (kk = 0; kk < nloc; kk++) {
      lid   = perm_ix[kk];
      state = lid_state[lid];
      if (lid_removed[lid]) continue;
      if (state == MIS_NOT_DONE) {
        /* parallel test: defer to undone ghost neighbors owned by higher-rank processes */
        isOK = PETSC_TRUE;
        if ((ix = lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
          ii  = matB->compressedrow.i;
          n   = ii[ix + 1] - ii[ix];
          idx = matB->j + ii[ix];
          for (j = 0; j < n; j++) {
            cpid   = idx[j]; /* compressed row ID in B mat */
            gid    = cpcol_gid[cpid];
            statej = cpcol_state[cpid];
            if (statej == MIS_NOT_DONE && gid >= Iend) { /* (gid >= Iend) is a proxy for (owner rank > my rank) */
              isOK = PETSC_FALSE;                        /* cannot delete */
              break;
            }
          }
        }           /* parallel test */
        if (isOK) { /* select or remove this vertex */
          nDone++;
          /* check for singleton */
          ii = matA->i;
          n  = ii[lid + 1] - ii[lid];
          if (n < 2) {
            /* if I have any ghost adjacency then this is not a singleton */
            ix = lid_cprowID[lid];
            if (ix == -1 || !(matB->compressedrow.i[ix + 1] - matB->compressedrow.i[ix])) {
              nremoved++;
              lid_removed[lid] = PETSC_TRUE;
              /* technically in the MIS, but do not select it */
              continue; /* one local adjacency (self) and no ghosts - singleton */
            }
          }
          /* SELECTED state encoded with global index */
          lid_state[lid] = lid + my0; /* nonnegative state == selected */
          nselected++;
          if (strict_aggs) {
            PetscCall(PetscCDAppendID(agg_lists, lid, lid + my0));
          } else {
            PetscCall(PetscCDAppendID(agg_lists, lid, lid));
          }
          /* delete local adjacency */
          idx = matA->j + ii[lid];
          for (j = 0; j < n; j++) {
            lidj   = idx[j];
            statej = lid_state[lidj];
            if (statej == MIS_NOT_DONE) {
              nDone++;
              if (strict_aggs) {
                PetscCall(PetscCDAppendID(agg_lists, lid, lidj + my0));
              } else {
                PetscCall(PetscCDAppendID(agg_lists, lid, lidj));
              }
              lid_state[lidj] = MIS_DELETED; /* delete this */
            }
          }
          /* delete ghost adjacency of lid - deleted ghosts are handled later for strict_aggs */
          if (!strict_aggs) {
            if ((ix = lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
              ii  = matB->compressedrow.i;
              n   = ii[ix + 1] - ii[ix];
              idx = matB->j + ii[ix];
              for (j = 0; j < n; j++) {
                cpid   = idx[j]; /* compressed row ID in B mat */
                statej = cpcol_state[cpid];
                if (statej == MIS_NOT_DONE) PetscCall(PetscCDAppendID(agg_lists, lid, nloc + cpid));
              }
            }
          }
        } /* selected */
      }   /* not done vertex */
    }     /* vertex loop */
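    /* local pass complete; the ghost copies in cpcol_state are now stale and are refreshed below */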

    /* update ghost states and count remaining undone vertices */
    if (mpimat) {
      /* scatter states, check for done */
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE));
      ii = matB->compressedrow.i;
      for (ix = 0; ix < matB->compressedrow.nrows; ix++) {
        lid   = matB->compressedrow.rindex[ix]; /* local boundary node */
        state = lid_state[lid];
        if (state == MIS_NOT_DONE) {
          /* look at ghosts */
          n   = ii[ix + 1] - ii[ix];
          idx = matB->j + ii[ix];
          for (j = 0; j < n; j++) {
            cpid   = idx[j]; /* compressed row ID in B mat */
            statej = cpcol_state[cpid];
            if (MIS_IS_SELECTED(statej)) { /* a ghost neighbor was selected, so lid is now deleted */
              nDone++;
              lid_state[lid] = MIS_DELETED; /* delete this */
              if (!strict_aggs) {
                lidj = nloc + cpid;
                PetscCall(PetscCDAppendID(agg_lists, lidj, lid));
              } else {
                sgid                = cpcol_gid[cpid];
                lid_parent_gid[lid] = sgid; /* keep track of the selected vertex that I belong to */
              }
              break;
            }
          }
        }
      }
      /* all done? */
      t1 = nloc - nDone;
      PetscCall(MPIU_Allreduce(&t1, &t2, 1, MPIU_INT, MPI_SUM, comm)); /* synchronous version */
      if (!t2) break;
    } else break; /* all done */
  }               /* outer parallel MIS loop */
  PetscCall(ISRestoreIndices(perm, &perm_ix));
  PetscCall(PetscInfo(Gmat, "\t removed %" PetscInt_FMT " of %" PetscInt_FMT " vertices.  %" PetscInt_FMT " selected.\n", nremoved, nloc, nselected));

  /* tell the adjacent processes which selected vertex my deleted boundary vertices belong to - fill in agg_lists entries of selected ghosts */
  if (strict_aggs && matB) {
    /* need a copy in a freeable buffer -- should do this globally */
    PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_sel_gid));
    PetscCall(PetscMalloc1(num_fine_ghosts, &icpcol_gid));
    for (cpid = 0; cpid < num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid];

    /* get the parent (selected vertex) gid of each deleted ghost */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE));
    for (cpid = 0; cpid < num_fine_ghosts; cpid++) {
      sgid = cpcol_sel_gid[cpid];
      gid  = icpcol_gid[cpid];
      if (sgid >= my0 && sgid < Iend) { /* I own the selected vertex this deleted ghost belongs to */
        slid = sgid - my0;
        PetscCall(PetscCDAppendID(agg_lists, slid, gid));
      }
    }
    PetscCall(PetscFree(icpcol_gid));
    PetscCall(PetscFree(cpcol_sel_gid));
  }
  if (mpimat) {
    PetscCall(PetscSFDestroy(&sf));
    PetscCall(PetscFree(cpcol_gid));
    PetscCall(PetscFree(cpcol_state));
  }
  PetscCall(PetscFree(lid_cprowID));
  PetscCall(PetscFree(lid_gid));
  PetscCall(PetscFree(lid_removed));
  if (strict_aggs) PetscCall(PetscFree(lid_parent_gid));
  PetscCall(PetscFree(lid_state));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MIS coarsen, simple greedy.
*/
static PetscErrorCode MatCoarsenApply_MIS(MatCoarsen coarse)
{
  Mat mat = coarse->graph;

  PetscFunctionBegin;
  if (!coarse->perm) {
    IS       perm;
    PetscInt n, m;
    MPI_Comm comm;

    PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
    PetscCall(MatGetLocalSize(mat, &m, &n));
    PetscCall(ISCreateStride(comm, m, 0, 1, &perm));
    PetscCall(MatCoarsenApply_MIS_private(perm, mat, coarse->strict_aggs, &coarse->agg_lists));
    PetscCall(ISDestroy(&perm));
  } else {
    PetscCall(MatCoarsenApply_MIS_private(coarse->perm, mat, coarse->strict_aggs, &coarse->agg_lists));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCoarsenView_MIS(MatCoarsen coarse, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   iascii;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] MIS aggregator\n", rank));
    if (!rank) {
      PetscCDIntNd *pos, *pos2;
      for (PetscInt kk = 0; kk < coarse->agg_lists->size; kk++) {
        PetscCall(PetscCDGetHeadPos(coarse->agg_lists, kk, &pos));
        if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected %d: ", (int)kk));
        while (pos) {
          PetscInt gid1;
          PetscCall(PetscCDIntNdGetID(pos, &gid1));
          PetscCall(PetscCDGetNextPos(coarse->agg_lists, kk, &pos));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %d ", (int)gid1));
        }
        if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   MATCOARSENMIS - Creates a coarsening context based on a maximal independent set (MIS) algorithm

   Collective

   Input Parameter:
.  coarse - the coarsen context
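
   Example Usage:
   A minimal sketch of driving this coarsener through the public `MatCoarsen` interface; the adjacency matrix `A` is a placeholder for an existing `MATAIJ` graph:
.vb
   MatCoarsen crs;
   MatCoarsenCreate(PETSC_COMM_WORLD, &crs);
   MatCoarsenSetType(crs, MATCOARSENMIS);
   MatCoarsenSetAdjacency(crs, A);
   MatCoarsenApply(crs);
   MatCoarsenView(crs, PETSC_VIEWER_STDOUT_WORLD);
   MatCoarsenDestroy(&crs);
.ve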

   Level: beginner

.seealso: `MatCoarsen`, `MatCoarsenApply()`, `MatCoarsenGetData()`, `MatCoarsenSetType()`, `MatCoarsenType`
M*/

PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MIS(MatCoarsen coarse)
{
  PetscFunctionBegin;
  coarse->ops->apply = MatCoarsenApply_MIS;
  coarse->ops->view  = MatCoarsenView_MIS;
  PetscFunctionReturn(PETSC_SUCCESS);
}