Actual source code: pmetis.c

petsc-3.9.4 2018-09-11

#include <../src/mat/impls/adj/mpi/mpiadj.h>

/*
   Currently using ParMetis-4.0.2
*/

#include <parmetis.h>

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  PetscInt  cuts;         /* number of cuts made (output) */
  PetscInt  foldfactor;   /* folding factor */
  PetscInt  parallel;     /* use parallel partitioner for coarse problem */
  PetscInt  indexing;     /* 0 indicates C indexing, 1 Fortran */
  PetscInt  printout;     /* indicates if one wishes Metis to print info */
  PetscBool repartition;
} MatPartitioning_Parmetis;

#define CHKERRQPARMETIS(n,func)                                             \
  if (n == METIS_ERROR_INPUT) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to wrong inputs and/or options for %s",func); \
  else if (n == METIS_ERROR_MEMORY) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to insufficient memory in %s",func); \
  else if (n == METIS_ERROR) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS general error in %s",func); \

#define PetscStackCallParmetis(func,args) do {PetscStackPush(#func);status = func args;PetscStackPop; CHKERRQPARMETIS(status,#func);} while (0)

/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  PetscInt                 *locals = NULL;
  Mat                      mat     = part->adj,amat,pmat;
  PetscBool                flg;
  PetscInt                 bs = 1;

  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (flg) {
    amat = mat;
    PetscObjectReference((PetscObject)amat);
  } else {
    /* bs indicates whether the converted matrix is "reduced" from the original and hence the
       resulting partition needs to be stretched to match the original matrix */
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&amat);
    if (amat->rmap->n > 0) bs = mat->rmap->n/amat->rmap->n;
  }
  MatMPIAdjCreateNonemptySubcommMat(amat,&pmat);
  MPI_Barrier(PetscObjectComm((PetscObject)part));

  if (pmat) {
    MPI_Comm   pcomm,comm;
    Mat_MPIAdj *adj     = (Mat_MPIAdj*)pmat->data;
    PetscInt   *vtxdist = pmat->rmap->range;
    PetscInt   *xadj    = adj->i;
    PetscInt   *adjncy  = adj->j;
    PetscInt   itmp     = 0,wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[24], i, j;
    real_t     *tpwgts,*ubvec,itr=0.1;
    int        status;

    PetscObjectGetComm((PetscObject)pmat,&pcomm);
#if defined(PETSC_USE_DEBUG)
    /* check that matrix has no diagonal entries */
    {
      PetscInt rstart;
      MatGetOwnershipRange(pmat,&rstart,NULL);
      for (i=0; i<pmat->rmap->n; i++) {
        for (j=xadj[i]; j<xadj[i+1]; j++) {
          if (adjncy[j] == i+rstart) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row %D has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
        }
      }
    }
#endif

    PetscMalloc1(amat->rmap->n,&locals);

    /* ParMETIS wgtflag encoding: 0 = no weights, 1 = edge weights only, 2 = vertex weights only, 3 = both */
    if (adj->values && !part->vertex_weights)
      wgtflag = 1;
    if (part->vertex_weights && !adj->values)
      wgtflag = 2;
    if (part->vertex_weights && adj->values)
      wgtflag = 3;

    if (PetscLogPrintInfo) {itmp = pmetis->printout; pmetis->printout = 127;}
    PetscMalloc1(ncon*nparts,&tpwgts);
    for (i=0; i<ncon; i++) {
      for (j=0; j<nparts; j++) {
        if (part->part_weights) {
          tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
        } else {
          tpwgts[i*nparts+j] = 1./nparts;
        }
      }
    }
    PetscMalloc1(ncon,&ubvec);
    for (i=0; i<ncon; i++) {
      ubvec[i] = 1.05;
    }
    /* This sets the defaults */
    options[0] = 0;
    for (i=1; i<24; i++) {
      options[i] = -1;
    }
    /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
    MPI_Comm_dup(pcomm,&comm);
    if (pmetis->repartition) {
      PetscStackCallParmetis(ParMETIS_V3_AdaptiveRepart,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)part->vertex_weights,(idx_t*)adj->values,(idx_t*)&wgtflag,(idx_t*)&numflag,(idx_t*)&ncon,(idx_t*)&nparts,tpwgts,ubvec,&itr,(idx_t*)options,(idx_t*)&pmetis->cuts,(idx_t*)locals,&comm));
    } else {
      PetscStackCallParmetis(ParMETIS_V3_PartKway,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)adj->values,(idx_t*)&wgtflag,(idx_t*)&numflag,(idx_t*)&ncon,(idx_t*)&nparts,tpwgts,ubvec,(idx_t*)options,(idx_t*)&pmetis->cuts,(idx_t*)locals,&comm));
    }
    MPI_Comm_free(&comm);

    PetscFree(tpwgts);
    PetscFree(ubvec);
    if (PetscLogPrintInfo) pmetis->printout = itmp;

    if (bs > 1) {
      PetscInt i,j,*newlocals;
      PetscMalloc1(bs*amat->rmap->n,&newlocals);
      for (i=0; i<amat->rmap->n; i++) {
        for (j=0; j<bs; j++) {
          newlocals[bs*i + j] = locals[i];
        }
      }
      PetscFree(locals);
      ISCreateGeneral(PetscObjectComm((PetscObject)part),bs*amat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);
    } else {
      ISCreateGeneral(PetscObjectComm((PetscObject)part),amat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);
    }
  } else {
    ISCreateGeneral(PetscObjectComm((PetscObject)part),0,NULL,PETSC_COPY_VALUES,partitioning);
  }

  MatDestroy(&pmat);
  MatDestroy(&amat);
  return(0);
}

PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  int                      rank;
  PetscBool                iascii;

  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    if (pmetis->parallel == 2) {
      PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");
    } else {
      PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");
    }
    PetscViewerASCIIPrintf(viewer,"  Using %D fold factor\n",pmetis->foldfactor);
    PetscViewerASCIIPushSynchronized(viewer);
    PetscViewerASCIISynchronizedPrintf(viewer,"  [%d]Number of cuts found %D\n",rank,pmetis->cuts);
    PetscViewerFlush(viewer);
    PetscViewerASCIIPopSynchronized(viewer);
  }
  return(0);
}

/*@
     MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
         do the partitioning of the coarse grid.

  Logically Collective on MatPartitioning

  Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;

  pmetis->parallel = 1;
  return(0);
}

/*@
     MatPartitioningParmetisSetRepartition - Repartition the
     current mesh to rebalance the computation.

  Logically Collective on MatPartitioning

  Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode  MatPartitioningParmetisSetRepartition(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;

  pmetis->repartition = PETSC_TRUE;
  return(0);
}

/*@
  MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. cut - the edge cut

   Level: advanced

@*/
PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*) part->data;

  *cut = pmetis->cuts;
  return(0);
}

PetscErrorCode MatPartitioningSetFromOptions_Parmetis(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
{
  PetscBool      flag = PETSC_FALSE;

  PetscOptionsHead(PetscOptionsObject,"Set ParMETIS partitioning options");
  PetscOptionsBool("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",flag,&flag,NULL);
  if (flag) {
    MatPartitioningParmetisSetCoarseSequential(part);
  }
  /* pass PETSC_FALSE as the default so this option does not inherit the value left in flag by the previous query */
  PetscOptionsBool("-mat_partitioning_parmetis_repartition","Repartition current mesh to rebalance computation","MatPartitioningParmetisSetRepartition",PETSC_FALSE,&flag,NULL);
  if (flag) {
    MatPartitioningParmetisSetRepartition(part);
  }
  PetscOptionsTail();
  return(0);
}
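
/*
   Usage note (added illustration, not part of the original source): the two
   options registered above are normally supplied on the command line of a
   PETSc program that calls MatPartitioningSetFromOptions(), for example the
   hypothetical executable ./myapp below:

     ./myapp -mat_partitioning_type parmetis \
             -mat_partitioning_parmetis_coarse_sequential \
             -mat_partitioning_parmetis_repartition
*/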


PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;

  PetscFree(pmetis);
  return(0);
}


/*MC
   MATPARTITIONINGPARMETIS - Creates a partitioning context via the external package PARMETIS.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner
-  -mat_partitioning_parmetis_repartition - repartition the current mesh to rebalance the computation

   Level: beginner

   Notes: See http://www-users.cs.umn.edu/~karypis/metis/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  PetscErrorCode           ierr;
  MatPartitioning_Parmetis *pmetis;

  PetscNewLog(part,&pmetis);
  part->data = (void*)pmetis;

  pmetis->cuts        = 0;   /* output variable */
  pmetis->foldfactor  = 150; /* folding factor */
  pmetis->parallel    = 2;   /* use parallel partitioner for coarse grid */
  pmetis->indexing    = 0;   /* index numbering starts from 0 */
  pmetis->printout    = 0;   /* print no output while running */
  pmetis->repartition = PETSC_FALSE;

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  return(0);
}
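
/*
   Usage sketch (added illustration, not part of the original source): a minimal,
   hedged example of driving this partitioner through the public MatPartitioning
   interface. The Mat A is assumed to be an assembled parallel matrix; error
   checking and PetscInitialize()/PetscFinalize() are omitted for brevity.
   MatPartitioningApply() dispatches to MatPartitioningApply_Parmetis() above,
   and MatPartitioningParmetisGetEdgeCut() then reports the number of cut edges.

     MatPartitioning part;
     IS              is;
     PetscInt        cut;

     MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
     MatPartitioningSetAdjacency(part,A);
     MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     MatPartitioningParmetisGetEdgeCut(part,&cut);
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/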

/*@
 MatMeshToVertexGraph -   This routine does not exist because ParMETIS does not provide the functionality.  Uses the ParMETIS package to
                       convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
                       between vertices of the cells and is suitable for partitioning with the MatPartitioning object. Use this to partition
                       vertices of a mesh. More likely you should use MatMeshToCellGraph()

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is
     the number of cells, the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToCellGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

@*/
PetscErrorCode MatMeshToVertexGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"ParMETIS does not provide this functionality");
  return(0);
}

/*@
     MatMeshToCellGraph -   Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
                       between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
                       cells of a mesh.

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

$     Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, quadrilaterals 4 entries,
$         tetrahedra 4 entries and hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
$         mix tetrahedra and hexahedra
$     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell.
$     The number of rows in mesh is the number of cells, the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscInt       *newxadj,*newadjncy;
  PetscInt       numflag=0;
  Mat_MPIAdj     *adj   = (Mat_MPIAdj*)mesh->data,*newadj;
  PetscBool      flg;
  int            status;
  MPI_Comm       comm;

  PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  PetscObjectGetComm((PetscObject)mesh,&comm);
  PetscStackCallParmetis(ParMETIS_V3_Mesh2Dual,((idx_t*)mesh->rmap->range,(idx_t*)adj->i,(idx_t*)adj->j,(idx_t*)&numflag,(idx_t*)&ncommonnodes,(idx_t**)&newxadj,(idx_t**)&newadjncy,&comm));
  MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh),mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,NULL,dual);
  newadj = (Mat_MPIAdj*)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  return(0);
}
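
/*
   Usage sketch (added illustration, not part of the original source): a hedged
   example of building the dual (cell-to-cell) graph of a tiny mesh and handing
   it to a partitioner. The arrays describe two triangles sharing an edge on a
   single process and are illustrative values only; MatCreateMPIAdj() takes
   ownership of ia and ja, so they are allocated with PetscMalloc1(). With
   ncommonnodes = 2, triangles sharing an edge become neighbors in the dual.

     Mat      mesh,dual;
     PetscInt *ia,*ja;

     PetscMalloc1(3,&ia);
     PetscMalloc1(6,&ja);
     ia[0] = 0; ia[1] = 3; ia[2] = 6;
     ja[0] = 0; ja[1] = 1; ja[2] = 2;
     ja[3] = 1; ja[4] = 2; ja[5] = 3;
     MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&mesh);
     MatMeshToCellGraph(mesh,2,&dual);
     MatDestroy(&dual);
     MatDestroy(&mesh);
*/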