Actual source code: partparmetis.c

#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PARMETIS)
#include <parmetis.h>
#endif

PetscBool  ParMetisPartitionerCite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] =
  "@article{KarypisKumar98,\n"
  "  author  = {George Karypis and Vipin Kumar},\n"
  "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
  "  journal = {Journal of Parallel and Distributed Computing},\n"
  "  volume  = {48},\n"
  "  pages   = {71--85},\n"
  "  year    = {1998},\n"
  "  doi     = {10.1006/jpdc.1997.1403}\n"
  "}\n";

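/* Options stored by the ParMETIS partitioner: a duplicated communicator for the
   ParMETIS calls, the partitioning scheme ("kway" or "rb"), the allowed load
   imbalance, the METIS/ParMETIS debug level, and the random seed */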
typedef struct {
  MPI_Comm  pcomm;
  PetscInt  ptype;
  PetscReal imbalanceRatio;
  PetscInt  debugFlag;
  PetscInt  randomSeed;
} PetscPartitioner_ParMetis;

static const char *ptypes[] = {"kway", "rb"};

static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;

  MPI_Comm_free(&p->pcomm);
  PetscFree(part->data);
  return 0;
}

static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;

  PetscViewerASCIIPushTab(viewer);
  PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);
  PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);
  PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);
  PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);
  PetscViewerASCIIPopTab(viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) PetscPartitionerView_ParMetis_ASCII(part, viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;

  PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");
  PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
  PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);
  PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);
  PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);
  PetscOptionsTail();
  return 0;
}

static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
  MPI_Comm       comm;
  PetscInt       nvtxs       = numVertices; /* The number of local vertices in the distributed graph */
  PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
  PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt        = NULL;        /* Vertex weights */
  PetscInt      *adjwgt      = NULL;        /* Edge weights */
  PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
  PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon        = 1;           /* The number of weights per vertex */
  PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
  real_t        *tpwgts;                    /* The fraction of the total vertex weight assigned to each partition */
  real_t        *ubvec;                     /* The imbalance tolerance for the vertex weights */
  PetscInt       options[64];               /* Options */
  PetscInt       v, i, *assignment, *points;
  PetscMPIInt    p, size, rank;
  PetscBool      hasempty = PETSC_FALSE;

  PetscObjectGetComm((PetscObject) part, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  /* Calculate vertex distribution */
  PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);
  vtxdist[0] = 0;
  MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
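  /* Prefix-sum the gathered local sizes; along the way, record whether any rank contributes an empty local graph */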
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscFree4(vtxdist,tpwgts,ubvec,assignment);
    ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
    return 0;
  }
  /* Calculate partition weights */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      PetscSectionGetDof(targetSection,p,&tpd);
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
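      /* Recompute the last weight from the others so the tpwgts sum to exactly 1.0 in floating point */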
      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = pm->imbalanceRatio;

  /* Weight cells */
  if (vertSection) {
    PetscMalloc1(nvtxs,&vwgt);
    for (v = 0; v < nvtxs; ++v) {
      PetscSectionGetDof(vertSection, v, &vwgt[v]);
    }
    wgtflag |= 2; /* have weights on graph vertices */
  }

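  /* If every vertex lives on a single process, skip ParMETIS and run serial METIS on that rank */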
  for (p = 0; p < size && !vtxdist[p+1]; ++p);
  if (vtxdist[p+1] == vtxdist[size]) {
    if (rank == p) {
      int err;
      err = METIS_SetDefaultOptions(options); /* initialize all defaults */
      if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      if (metis_ptype == 1) {
        PetscStackPush("METIS_PartGraphRecursive");
        err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in METIS_PartGraphRecursive()", err);
      } else {
        /*
         It would be nice to activate the two options below, but they would need some actual testing:
         - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
         - If CONTIG is set to 1, METIS exits with an error if the graph is disconnected, despite the manual saying the option is ignored in that case.
        */
        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPush("METIS_PartGraphKway");
        err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in METIS_PartGraphKway()", err);
      }
    }
  } else {
    MPI_Comm pcomm = pm->pcomm;

    options[0] = 1; /* use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* ParMETIS does not support empty graphs on some of the processes */
      PetscInt cnt;

      MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);
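      /* Compact vtxdist so it describes only the nonempty ranks that remain in the split communicator */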
      for (p=0,cnt=0;p<size;p++) {
        if (vtxdist[p+1] != vtxdist[p]) {
          vtxdist[cnt+1] = vtxdist[p+1];
          cnt++;
        }
      }
    }
    if (nvtxs) {
      int err;
      PetscStackPush("ParMETIS_V3_PartKway");
      err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
    }
    if (hasempty) {
      MPI_Comm_free(&pcomm);
    }
  }

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscSectionAddDof(partSection, assignment[v], 1);
  PetscMalloc1(nvtxs, &points);
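  /* List the local vertices partition by partition so the IS is grouped by target partition */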
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
  PetscFree4(vtxdist,tpwgts,ubvec,assignment);
  PetscFree(vwgt);
  return 0;
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}

static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_ParMetis;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
  part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
  part->ops->partition      = PetscPartitionerPartition_ParMetis;
  return 0;
}

/*MC
  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

  Level: intermediate

  Options Database Keys:
+  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
.  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
.  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
-  -petscpartitioner_parmetis_seed <int> - Random seed

  Notes:
  When the graph lives entirely on a single process, this partitioner calls METIS directly instead of ParMETIS.

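  Example Usage:
  A minimal sketch of selecting this partitioner from code; the DM handle dm is assumed to already exist, and error checking is omitted for brevity:
.vb
  PetscPartitioner part;

  PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
  PetscPartitionerSetFromOptions(part); /* picks up the -petscpartitioner_parmetis_* options listed above */
  DMPlexSetPartitioner(dm, part);       /* attach the partitioner to a DMPlex mesh */
  PetscPartitionerDestroy(&part);
.ve
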
.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p;

  PetscNewLog(part, &p);
  part->data = p;

  MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);
  p->ptype          = 0;
  p->imbalanceRatio = 1.05;
  p->debugFlag      = 0;
  p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

  PetscPartitionerInitialize_ParMetis(part);
  PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite);
  return 0;
}