Actual source code: partparmetis.c
#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PARMETIS)
#include <parmetis.h>
#endif
PetscBool ParMetisPartitionerCite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] =
  "@article{KarypisKumar98,\n"
  "  author  = {George Karypis and Vipin Kumar},\n"
  "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
  "  journal = {Journal of Parallel and Distributed Computing},\n"
  "  volume  = {48},\n"
  "  pages   = {71--85},\n"
  "  year    = {1998},\n"
  "  doi     = {https://doi.org/10.1006/jpdc.1997.1403}\n"
  "}\n";
typedef struct {
  MPI_Comm  pcomm;          /* communicator dup'ed from the partitioner, handed to ParMETIS */
  PetscInt  ptype;          /* partitioning method: 0 = kway, 1 = recursive bisection */
  PetscReal imbalanceRatio; /* load imbalance tolerance */
  PetscInt  debugFlag;      /* debugging level passed to (Par)METIS */
  PetscInt  randomSeed;     /* random seed, -1 selects the (Par)METIS default */
} PetscPartitioner_ParMetis;

static const char *ptypes[] = {"kway", "rb"};
static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
  ierr = PetscFree(part->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_ParMetis_ASCII(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
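
/* As an illustration, recursive bisection with a 10% load imbalance limit could be
   requested at runtime with (illustrative values):
     -petscpartitioner_parmetis_type rb -petscpartitioner_parmetis_imbalance_ratio 1.1 */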
static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
  MPI_Comm       comm;
  PetscInt       nvtxs       = numVertices; /* The number of vertices in the full graph */
  PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
  PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt        = NULL;        /* Vertex weights */
  PetscInt      *adjwgt      = NULL;        /* Edge weights */
  PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
  PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon        = 1;           /* The number of weights per vertex */
  PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
  real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
  real_t        *ubvec;                     /* The imbalance tolerance for each vertex weight */
  PetscInt       options[64];               /* Options */
  PetscInt       v, i, *assignment, *points;
  PetscMPIInt    p, size, rank;
  PetscBool      hasempty = PETSC_FALSE;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Calculate vertex distribution */
  ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
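  /* After the prefix sum, vtxdist[] is the ownership array (Par)METIS expects: rank p owns
     the global vertices [vtxdist[p], vtxdist[p+1]), and vtxdist[size] is the global vertex
     count. hasempty records whether any rank owns no vertices at all. */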
  /* null graph */
  if (vtxdist[size] == 0) {
    ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* Calculate partition weights */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = pm->imbalanceRatio;
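  /* When nonzero target weights are given, each tpwgts[] entry has been clamped to at least
     PETSC_SMALL and the vector renormalized so it sums to exactly 1.0 (the last entry absorbs
     rounding error), since (Par)METIS requires the partition fractions to sum to one.
     ubvec[0] is the permitted load imbalance, e.g. 1.05 allows 5% above the average weight. */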
  /* Weight cells */
  if (vertSection) {
    ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
    for (v = 0; v < nvtxs; ++v) {
      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
    }
    wgtflag |= 2; /* have weights on graph vertices */
  }
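  /* Find the first rank that owns vertices; if that rank owns the entire graph, bypass
     ParMETIS and run the serial METIS partitioner on that rank alone. */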
  for (p = 0; p < size && !vtxdist[p+1]; ++p);
  if (vtxdist[p+1] == vtxdist[size]) {
    if (rank == p) {
      int err;

      err = METIS_SetDefaultOptions(options); /* initialize all defaults */
      if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      if (metis_ptype == 1) {
        PetscStackPush("METIS_PartGraphRecursive");
        err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
          It would be nice to activate the two options below, but they would need some actual testing.
          - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
          - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in such a case.
        */
        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPush("METIS_PartGraphKway");
        err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    MPI_Comm pcomm = pm->pcomm;

    options[0] = 1; /* use user-supplied options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* ParMETIS does not support empty graphs on some of the processes */
      PetscInt cnt;

      ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
      for (p=0,cnt=0;p<size;p++) {
        if (vtxdist[p+1] != vtxdist[p]) {
          vtxdist[cnt+1] = vtxdist[p+1];
          cnt++;
        }
      }
    }
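    /* vtxdist[] now lists only the nonempty ranks, matching the ranks of the split
       communicator pcomm that is passed to ParMETIS below. */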
    if (nvtxs) {
      int err;

      PetscStackPush("ParMETIS_V3_PartKway");
      err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      if (err != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
    }
    if (hasempty) {
      ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
    }
  }
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
  ierr = PetscFree(vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_ParMetis;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
  part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
  part->ops->partition      = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}
/*MC
  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

  Level: intermediate

  Options Database Keys:
+  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
.  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
.  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
-  -petscpartitioner_parmetis_seed <int> - Random seed

  Notes: When the entire graph resides on a single process, this partitioner calls METIS directly rather than ParMETIS
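
  Example:
  A minimal sketch (assuming `dm` is an existing DMPlex) selecting this partitioner programmatically:
.vb
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
  PetscPartitionerSetFromOptions(part);
.ve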
.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
  p->ptype          = 0;
  p->imbalanceRatio = 1.05;
  p->debugFlag      = 0;
  p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

  ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}