Actual source code: partparmetis.c
#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PARMETIS)
#include <parmetis.h>
#endif

PetscBool ParMetisPartitionerCite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                                           " author = {George Karypis and Vipin Kumar},\n"
                                           " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                                           " journal = {Journal of Parallel and Distributed Computing},\n"
                                           " volume = {48},\n"
                                           " pages = {71--85},\n"
                                           " year = {1998},\n"
                                           " doi = {https://doi.org/10.1006/jpdc.1997.1403}\n"
                                           "}\n";

typedef struct {
  MPI_Comm  pcomm;
  PetscInt  ptype;
  PetscReal imbalanceRatio;
  PetscInt  debugFlag;
  PetscInt  randomSeed;
} PetscPartitioner_ParMetis;

static const char *ptypes[] = {"kway", "rb"};
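/* Field meanings: pcomm is a private duplicate of the partitioner's communicator used for the
   ParMETIS call; ptype indexes ptypes[] (0 = "kway", 1 = "rb", i.e. recursive bisection);
   imbalanceRatio becomes the (Par)METIS load-imbalance tolerance ubvec[0]; debugFlag and
   randomSeed are forwarded to the corresponding (Par)METIS options. */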

static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free(&p->pcomm));
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double)p->imbalanceRatio));
  PetscCall(PetscViewerASCIIPrintf(viewer, "debug flag %" PetscInt_FMT "\n", p->debugFlag));
  PetscCall(PetscViewerASCIIPrintf(viewer, "random seed %" PetscInt_FMT "\n", p->randomSeed));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_ParMetis_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner ParMetis Options");
  PetscCall(PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL));
  PetscCall(PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL));
  PetscCall(PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL));
  PetscCall(PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *)part->data;
  MPI_Comm comm;
  PetscInt nvtxs = numVertices;     /* The number of vertices in full graph */
  PetscInt *vtxdist;                /* Distribution of vertices across processes */
  PetscInt *xadj = start;           /* Start of edge list for each vertex */
  PetscInt *adjncy = adjacency;     /* Edge lists for all vertices */
  PetscInt *vwgt = NULL;            /* Vertex weights */
  PetscInt *adjwgt = NULL;          /* Edge weights */
  PetscInt wgtflag = 0;             /* Indicates which weights are present */
  PetscInt numflag = 0;             /* Indicates initial offset (0 or 1) */
  PetscInt ncon = 1;                /* The number of weights per vertex */
  PetscInt metis_ptype = pm->ptype; /* kway or recursive bisection */
  real_t *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
  real_t *ubvec;                    /* The imbalance tolerance for vertex weights */
  PetscInt options[64];             /* Options */
  PetscInt v, i, *assignment, *points;
  PetscMPIInt p, size, rank;
  PetscBool hasempty = PETSC_FALSE;
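
  /* Strategy: the graph comes in distributed CSR form (start[]/adjacency[]). If, after computing
     vtxdist, all vertices turn out to live on a single rank, serial METIS is called on that rank;
     otherwise ParMETIS_V3_PartKway is called, on a sub-communicator that excludes vertex-less
     ranks when necessary. The resulting assignment is then converted to a PetscSection + IS. */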
  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Calculate vertex distribution */
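  /* vtxdist follows the ParMETIS convention: after the prefix sum below, rank p owns the global
     vertices [vtxdist[p], vtxdist[p+1]); hasempty records whether any rank owns no vertices. */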
  PetscCall(PetscMalloc4(size + 1, &vtxdist, nparts * ncon, &tpwgts, ncon, &ubvec, nvtxs, &assignment));
  vtxdist[0] = 0;
  PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p - 1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment));
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Calculate partition weights */
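  /* tpwgts[p] is the fraction of total vertex weight that should end up in partition p. When a
     targetSection is given and the targets are not all zero, they are clamped away from zero and
     renormalized so that they sum to exactly 1, as (Par)METIS requires; without a targetSection
     all partitions get equal weight. */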
  if (targetSection) {
    PetscInt p;
    real_t sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      PetscCall(PetscSectionGetDof(targetSection, p, &tpd));
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p], PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      for (p = 0, sumt = 0.0; p < nparts - 1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0 / nparts;
  }
  ubvec[0] = pm->imbalanceRatio;
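  /* ubvec holds one imbalance tolerance per vertex weight (ncon = 1 here); e.g. 1.05 allows a
     partition to exceed its target weight by roughly 5%. */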

  /* Weight cells */
  if (vertSection) {
    PetscCall(PetscMalloc1(nvtxs, &vwgt));
    for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
    wgtflag |= 2; /* have weights on graph vertices */
  }
  // Weight edges
  if (edgeSection) {
    PetscCall(PetscMalloc1(xadj[nvtxs], &adjwgt));
    for (PetscInt e = 0; e < xadj[nvtxs]; ++e) PetscCall(PetscSectionGetDof(edgeSection, e, &adjwgt[e]));
    wgtflag |= 1; /* have weights on graph edges */
  }
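
  /* Find the first rank that owns vertices; if that rank owns them all, the graph is effectively
     serial and plain METIS is used on that rank instead of ParMETIS. */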
  for (p = 0; !vtxdist[p + 1] && p < size; ++p);
  if (vtxdist[p + 1] == vtxdist[size]) {
    if (rank == p) {
      int err;
      err = METIS_SetDefaultOptions(options); /* initialize all defaults */
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED] = pm->randomSeed;
      PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      if (metis_ptype == 1) {
        PetscStackPushExternal("METIS_PartGraphRecursive");
        err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
          It would be nice to activate the two options below, but they would need some actual testing.
          - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
          - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in such a case.
        */
        /* options[METIS_OPTION_CONTIG] = 1; */  /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPushExternal("METIS_PartGraphKway");
        err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    MPI_Comm pcomm = pm->pcomm;

    options[0] = 1; /* use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
      PetscInt cnt;

      PetscCallMPI(MPI_Comm_split(pm->pcomm, !!nvtxs, rank, &pcomm));
      for (p = 0, cnt = 0; p < size; p++) {
        if (vtxdist[p + 1] != vtxdist[p]) {
          vtxdist[cnt + 1] = vtxdist[p + 1];
          cnt++;
        }
      }
    }
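    /* vtxdist has been compacted above so that it describes only the ranks that keep vertices,
       matching the ranks of the split communicator; ranks without vertices skip the call below. */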
    if (nvtxs) {
      int err;
      PetscStackPushExternal("ParMETIS_V3_PartKway");
      err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
    }
    if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
  }

  /* Convert to PetscSection+IS */
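  /* partSection gets one degree of freedom per local vertex, charged to the partition it was
     assigned to; the IS lists local vertex numbers grouped by partition, in partition order. */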
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
  PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment));
  PetscCall(PetscFree(vwgt));
  PetscCall(PetscFree(adjwgt));
  PetscFunctionReturn(PETSC_SUCCESS);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}

static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph = PETSC_FALSE;
  part->ops->view = PetscPartitionerView_ParMetis;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
  part->ops->destroy = PetscPartitionerDestroy_ParMetis;
  part->ops->partition = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

  Level: intermediate

  Options Database Keys:
+ -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
. -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
. -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
- -petscpartitioner_parmetis_seed <int> - Random seed

  Note:
  When the graph is on a single process, this partitioner actually calls METIS and not ParMETIS

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/
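
/*
  A minimal usage sketch of this partitioner type. It assumes an existing MPI communicator `comm`
  and is not tied to any particular mesh; the options line shows how the keys documented above are
  typically combined with the generic -petscpartitioner_type key.

    PetscPartitioner part;

    PetscCall(PetscPartitionerCreate(comm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS));
    PetscCall(PetscPartitionerSetFromOptions(part)); // picks up the -petscpartitioner_parmetis_* keys
    // ... hand `part` to the code that needs a partition, e.g. a DMPlex distribution ...
    PetscCall(PetscPartitionerDestroy(&part));

  Command line example:

    -petscpartitioner_type parmetis -petscpartitioner_parmetis_type rb -petscpartitioner_parmetis_imbalance_ratio 1.1
*/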

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p;

  PetscFunctionBegin;
  PetscCall(PetscNew(&p));
  part->data = p;

  PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
  p->ptype = 0;
  p->imbalanceRatio = 1.05;
  p->debugFlag = 0;
  p->randomSeed = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

  PetscCall(PetscPartitionerInitialize_ParMetis(part));
  PetscCall(PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite));
  PetscFunctionReturn(PETSC_SUCCESS);
}