Actual source code: pmetis.c

petsc-3.3-p7 2013-05-11
  1: 
  2: #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

  4: /* 
  5:    Currently using ParMetis-4.0.2
  6: */

  8: #include <parmetis.h>

 10: /*
 11:       The first 5 elements of this structure are the input control array to Metis
 12: */
 13: typedef struct {
 14:   PetscInt cuts;         /* number of cuts made (output) */
 15:   PetscInt foldfactor;
 16:   PetscInt parallel;     /* use parallel partitioner for coarse problem */
 17:   PetscInt indexing;     /* 0 indicates C indexing, 1 Fortran */
 18:   PetscInt printout;     /* indicates if one wishes Metis to print info */
 19: } MatPartitioning_Parmetis;

 21: #define CHKERRQPARMETIS(n) \
 22:   if (n == METIS_ERROR_INPUT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to wrong inputs and/or options"); \
 23:   else if (n == METIS_ERROR_MEMORY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to insufficient memory"); \
 24:   else if (n == METIS_ERROR) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS general error"); \

 26: /*
 27:    Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
 28: */
 31: static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
 32: {
 33:   MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
 34:   PetscErrorCode           ierr;
 35:   PetscInt                 *locals = PETSC_NULL;
 36:   Mat                      mat = part->adj,amat,pmat;
 37:   PetscBool                flg;
 38:   PetscInt                 bs = 1;

 41:   ierr = PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
 42:   if (flg) {
 43:     amat = mat;
 44:     ierr = PetscObjectReference((PetscObject)amat);CHKERRQ(ierr);
 45:   } else {
 46:     /* bs indicates if the converted matrix is "reduced" from the original and hence the
 47:        resulting partition results need to be stretched to match the original matrix */
 48:     ierr = MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&amat);CHKERRQ(ierr);
 49:     if (amat->rmap->n > 0) bs = mat->rmap->n/amat->rmap->n;
 50:   }
 51:   ierr = MatMPIAdjCreateNonemptySubcommMat(amat,&pmat);CHKERRQ(ierr);
 52:   ierr = MPI_Barrier(((PetscObject)part)->comm);CHKERRQ(ierr);

 54:   if (pmat) {
 55:     MPI_Comm   pcomm    = ((PetscObject)pmat)->comm,comm_pmetis;
 56:     Mat_MPIAdj *adj     = (Mat_MPIAdj*)pmat->data;
 57:     PetscInt   *vtxdist = pmat->rmap->range;
 58:     PetscInt   *xadj    = adj->i;
 59:     PetscInt   *adjncy  = adj->j;
 60:     PetscInt   itmp     = 0,wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[24], i, j;
 61:     real_t     *tpwgts,*ubvec;
 62:     int        status;

 64: #if defined(PETSC_USE_DEBUG)
 65:     /* check that matrix has no diagonal entries */
 66:     {
 67:       PetscInt rstart;
 68:       ierr = MatGetOwnershipRange(pmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
 69:       for (i=0; i<pmat->rmap->n; i++) {
 70:         for (j=xadj[i]; j<xadj[i+1]; j++) {
 71:           if (adjncy[j] == i+rstart) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row %d has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
 72:         }
 73:       }
 74:     }
 75: #endif

 77:     ierr = PetscMalloc(amat->rmap->n*sizeof(PetscInt),&locals);CHKERRQ(ierr);

 79:     if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
 80:     ierr = PetscMalloc(ncon*nparts*sizeof(real_t),&tpwgts);CHKERRQ(ierr);
 81:     for (i=0; i<ncon; i++) {
 82:       for (j=0; j<nparts; j++) {
 83:         if (part->part_weights) {
 84:           tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
 85:         } else {
 86:           tpwgts[i*nparts+j] = 1./nparts;
 87:         }
 88:       }
 89:     }
 90:     ierr = PetscMalloc(ncon*sizeof(real_t),&ubvec);CHKERRQ(ierr);
 91:     for (i=0; i<ncon; i++) {
 92:       ubvec[i] = 1.05;
 93:     }
 94:     /* This sets the defaults */
 95:     options[0] = 0;
 96:     for (i=1; i<24; i++) {
 97:       options[i] = -1;
 98:     }
 99:     /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
100:     ierr = MPI_Comm_dup(pcomm,&comm_pmetis);CHKERRQ(ierr);
101:     status = ParMETIS_V3_PartKway(vtxdist,xadj,adjncy,part->vertex_weights,adj->values,&wgtflag,&numflag,&ncon,&nparts,tpwgts,ubvec,options,&parmetis->cuts,locals,&comm_pmetis);CHKERRQPARMETIS(status);
102:     ierr = MPI_Comm_free(&comm_pmetis);CHKERRQ(ierr);

104:     ierr = PetscFree(tpwgts);CHKERRQ(ierr);
105:     ierr = PetscFree(ubvec);CHKERRQ(ierr);
106:     if (PetscLogPrintInfo) {parmetis->printout = itmp;}
107:   }

109:   if (bs > 1) {
110:     PetscInt i,j,*newlocals;
111:     ierr = PetscMalloc(bs*amat->rmap->n*sizeof(PetscInt),&newlocals);CHKERRQ(ierr);
112:     for (i=0; i<amat->rmap->n; i++) {
113:       for (j=0; j<bs; j++) {
114:         newlocals[bs*i + j] = locals[i];
115:       }
116:     }
117:     ierr = PetscFree(locals);CHKERRQ(ierr);
118:     ierr = ISCreateGeneral(((PetscObject)part)->comm,bs*amat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
119:   } else {
120:     ierr = ISCreateGeneral(((PetscObject)part)->comm,amat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
121:   }

123:   ierr = MatDestroy(&pmat);CHKERRQ(ierr);
124:   ierr = MatDestroy(&amat);CHKERRQ(ierr);
125:   return(0);
126: }
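
/*
   Usage sketch (not part of the original source): how MatPartitioningApply_Parmetis() is
   reached from application code.  The Mat A is assumed to be an already assembled parallel
   matrix; if it is not of type MATMPIADJ it is converted internally, as in the routine above.

      Mat             A;
      MatPartitioning part;
      IS              is;
      PetscErrorCode  ierr;

      ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
      ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
*/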

130: PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
131: {
132:   MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
134:   int rank;
135:   PetscBool                iascii;

138:   MPI_Comm_rank(((PetscObject)part)->comm,&rank);
139:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
140:   if (iascii) {
141:     if (parmetis->parallel == 2) {
142:       PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");
143:     } else {
144:       PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");
145:     }
146:     PetscViewerASCIIPrintf(viewer,"  Using %d fold factor\n",parmetis->foldfactor);
147:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
148:     PetscViewerASCIISynchronizedPrintf(viewer,"  [%d]Number of cuts found %d\n",rank,parmetis->cuts);
149:     PetscViewerFlush(viewer);
150:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
151:   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)->type_name);

153:   return(0);
154: }

158: /*@
159:      MatPartitioningParmetisSetCoarseSequential - Use the sequential code to 
160:          do the partitioning of the coarse grid.

162:   Logically Collective on MatPartitioning

164:   Input Parameter:
165: .  part - the partitioning context

167:    Level: advanced

169: @*/
170: PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
171: {
172:   MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

175:   parmetis->parallel = 1;
176:   return(0);
177: }

181: /*@
182:   MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

184:   Input Parameter:
185: . part - the partitioning context

187:   Output Parameter:
188: . cut - the edge cut

190:    Level: advanced

192: @*/
193: PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
194: {
195:   MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *) part->data;

198:   *cut = parmetis->cuts;
199:   return(0);
200: }
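
/*
   A minimal sketch (not part of the original source): the edge cut is only meaningful after
   MatPartitioningApply() has run, since ParMETIS_V3_PartKway() fills parmetis->cuts there.

      PetscInt cut;

      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = MatPartitioningParmetisGetEdgeCut(part,&cut);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"ParMETIS edge cut: %D\n",cut);CHKERRQ(ierr);
*/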

204: PetscErrorCode MatPartitioningSetFromOptions_Parmetis(MatPartitioning part)
205: {
207:   PetscBool      flag = PETSC_FALSE;

210:   PetscOptionsHead("Set ParMeTiS partitioning options");
211:   PetscOptionsBool("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",flag,&flag,PETSC_NULL);
212:   if (flag) {
213:     MatPartitioningParmetisSetCoarseSequential(part);
214:   }
215:   PetscOptionsTail();
216:   return(0);
217: }
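
/*
   Options-database sketch (not part of the original source): the flag parsed above can be
   given on the command line together with the type selection, e.g. for a hypothetical
   executable ./ex_partition

      mpiexec -n 4 ./ex_partition -mat_partitioning_type parmetis -mat_partitioning_parmetis_coarse_sequential

   provided the application calls MatPartitioningSetFromOptions() on its partitioning context.
*/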


222: PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
223: {
224:   MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

228:   PetscFree(parmetis);
229:   return(0);
230: }


233: /*MC
234:    MATPARTITIONINGPARMETIS - Creates a partitioning context via the external package PARMETIS.

236:    Collective on MPI_Comm

238:    Input Parameter:
239: .  part - the partitioning context

241:    Options Database Keys:
242: .  -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner

244:    Level: beginner

246:    Notes: See http://www-users.cs.umn.edu/~karypis/metis/

248: .keywords: Partitioning, create, context

250: .seealso: MatPartitioningSetType(), MatPartitioningType

252: M*/
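
/*
   Usage note (not part of the original source): selecting this partitioner invokes
   MatPartitioningCreate_Parmetis() below, which fills in the ops table so that
   MatPartitioningApply() dispatches to MatPartitioningApply_Parmetis() above.

      ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);

   or, equivalently with MatPartitioningSetFromOptions(), -mat_partitioning_type parmetis.
*/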

254: EXTERN_C_BEGIN
257: PetscErrorCode  MatPartitioningCreate_Parmetis(MatPartitioning part)
258: {
260:   MatPartitioning_Parmetis *parmetis;

263:   PetscNewLog(part,MatPartitioning_Parmetis,&parmetis);
264:   part->data                = (void*)parmetis;

266:   parmetis->cuts       = 0;   /* output variable */
267:   parmetis->foldfactor = 150; /*folding factor */
268:   parmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
269:   parmetis->indexing   = 0;   /* index numbering starts from 0 */
270:   parmetis->printout   = 0;   /* print no output while running */

272:   part->ops->apply          = MatPartitioningApply_Parmetis;
273:   part->ops->view           = MatPartitioningView_Parmetis;
274:   part->ops->destroy        = MatPartitioningDestroy_Parmetis;
275:   part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
276:   return(0);
277: }
278: EXTERN_C_END

282: /*@
283:  MatMeshToVertexGraph -   This routine is not implemented because ParMETIS does not provide the needed functionality.  It would use the ParMETIS package to
284:                        convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
285:                        between vertices of the cells, suitable for partitioning with the MatPartitioning object. Use this to partition
286:                        vertices of a mesh. More likely you should use MatMeshToCellGraph().

288:    Collective on Mat

290:    Input Parameter:
291: +     mesh - the graph that represents the mesh
292: -     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
293:                      quadrilaterals, 3 for tetrahedra and 4 for hexahedra

295:    Output Parameter:
296: .     dual - the dual graph

298:    Notes:
299:      Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

301:      The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is the
302:      number of cells; the number of columns is the number of vertices.

304:    Level: advanced

306: .seealso: MatMeshToCellGraph(), MatCreateMPIAdj(), MatPartitioningCreate()
307:    
308: @*/
309: PetscErrorCode MatMeshToVertexGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
310: {
312:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"ParMETIS does not provide this functionality");
313:   return(0);
314: }

318: /*@
319:      MatMeshToCellGraph -   Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
320:                        between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
321:                        cells of a mesh.

323:    Collective on Mat

325:    Input Parameter:
326: +     mesh - the graph that represents the mesh
327: -     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
328:                      quadrilaterals, 3 for tetrahedra and 4 for hexahedra

330:    Output Parameter:
331: .     dual - the dual graph

333:    Notes:
334:      Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

336:      The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is the
337:      number of cells; the number of columns is the number of vertices.
338:    

340:    Level: advanced

342: .seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()


345: @*/
346: PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
347: {
348:   PetscErrorCode           ierr;
349:   PetscInt                 *newxadj,*newadjncy;
350:   PetscInt                 numflag=0;
351:   Mat_MPIAdj               *adj = (Mat_MPIAdj *)mesh->data,*newadj;
352:   PetscBool                flg;
353:   int                      status;

356:   ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
357:   if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

359:   CHKMEMQ;
360:   status = ParMETIS_V3_Mesh2Dual(mesh->rmap->range,adj->i,adj->j,&numflag,&ncommonnodes,&newxadj,&newadjncy,&((PetscObject)mesh)->comm);CHKERRQPARMETIS(status);
361:   CHKMEMQ;
362:   ierr = MatCreateMPIAdj(((PetscObject)mesh)->comm,mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,PETSC_NULL,dual);CHKERRQ(ierr);
363:   newadj = (Mat_MPIAdj *)(*dual)->data;
364:   newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
365:   return(0);
366: }