Actual source code: partparmetis.c
#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PARMETIS)
#include <parmetis.h>
#endif

PetscBool  ParMetisPartitionerCite       = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                                           "  author  = {George Karypis and Vipin Kumar},\n"
                                           "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                                           "  journal = {Journal of Parallel and Distributed Computing},\n"
                                           "  volume  = {48},\n"
                                           "  pages   = {71--85},\n"
                                           "  year    = {1998},\n"
                                           "  doi     = {https://doi.org/10.1006/jpdc.1997.1403}\n"
                                           "}\n";

typedef struct {
  MPI_Comm  pcomm;          /* Communicator used for the (Par)METIS calls */
  PetscInt  ptype;          /* Partitioning method: 0 = kway, 1 = rb (recursive bisection) */
  PetscReal imbalanceRatio; /* Load imbalance tolerance passed in ubvec */
  PetscInt  debugFlag;      /* Debugging level for (Par)METIS */
  PetscInt  randomSeed;     /* Random seed; -1 selects the library default */
} PetscPartitioner_ParMetis;

static const char *ptypes[] = {"kway", "rb"};

static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free(&p->pcomm));
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double)p->imbalanceRatio));
  PetscCall(PetscViewerASCIIPrintf(viewer, "debug flag %" PetscInt_FMT "\n", p->debugFlag));
  PetscCall(PetscViewerASCIIPrintf(viewer, "random seed %" PetscInt_FMT "\n", p->randomSeed));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_ParMetis_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner ParMetis Options");
  PetscCall(PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL));
  PetscCall(PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL));
  PetscCall(PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL));
  PetscCall(PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *)part->data;
  MPI_Comm                   comm;
  PetscInt                   nvtxs       = numVertices; /* The number of vertices in the full graph */
  PetscInt                  *vtxdist;                   /* Distribution of vertices across processes */
  PetscInt                  *xadj        = start;       /* Start of edge list for each vertex */
  PetscInt                  *adjncy      = adjacency;   /* Edge lists for all vertices */
  PetscInt                  *vwgt        = NULL;        /* Vertex weights */
  PetscInt                  *adjwgt      = NULL;        /* Edge weights */
  PetscInt                   wgtflag     = 0;           /* Indicates which weights are present */
  PetscInt                   numflag     = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt                   ncon        = 1;           /* The number of weights per vertex */
  PetscInt                   metis_ptype = pm->ptype;   /* kway or recursive bisection */
  real_t                    *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
  real_t                    *ubvec;                     /* The load imbalance tolerance for vertex weights */
  PetscInt                   options[64];               /* Options */
  PetscInt                   v, i, *assignment, *points;
  PetscMPIInt                p, size, rank;
  PetscBool                  hasempty = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Calculate vertex distribution */
  PetscCall(PetscMalloc4(size + 1, &vtxdist, nparts * ncon, &tpwgts, ncon, &ubvec, nvtxs, &assignment));
  vtxdist[0] = 0;
  PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p - 1];
  }
  /* Null graph */
  if (vtxdist[size] == 0) {
    PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment));
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Calculate partition weights */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      PetscCall(PetscSectionGetDof(targetSection, p, &tpd));
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p], PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      for (p = 0, sumt = 0.0; p < nparts - 1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0 / nparts;
  }
  ubvec[0] = pm->imbalanceRatio;

  /* Weight cells */
  if (vertSection) {
    PetscCall(PetscMalloc1(nvtxs, &vwgt));
    for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
    wgtflag |= 2; /* have weights on graph vertices */
  }

  /* Find the first process that owns vertices; if it owns all of them, call serial METIS on that rank */
  for (p = 0; !vtxdist[p + 1] && p < size; ++p)
    ;
  if (vtxdist[p + 1] == vtxdist[size]) {
    if (rank == p) {
      int err;
      err                          = METIS_SetDefaultOptions(options); /* initialize all defaults */
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      if (metis_ptype == 1) {
        PetscStackPushExternal("METIS_PartGraphRecursive");
        err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
          It would be nice to activate the two options below, but they would need some actual testing.
          - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
          - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in that case.
        */
        /* options[METIS_OPTION_CONTIG] = 1; */  /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPushExternal("METIS_PartGraphKway");
        err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    MPI_Comm pcomm = pm->pcomm;

    options[0] = 1; /* use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* ParMETIS does not support empty graphs on some of the processes */
      PetscInt cnt;

      /* Split off the ranks that own vertices and compress vtxdist accordingly */
      PetscCallMPI(MPI_Comm_split(pm->pcomm, !!nvtxs, rank, &pcomm));
      for (p = 0, cnt = 0; p < size; p++) {
        if (vtxdist[p + 1] != vtxdist[p]) {
          vtxdist[cnt + 1] = vtxdist[p + 1];
          cnt++;
        }
      }
    }
    if (nvtxs) {
      int err;
      PetscStackPushExternal("ParMETIS_V3_PartKway");
      err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
    }
    if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
  }

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
  PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment));
  PetscCall(PetscFree(vwgt));
  PetscFunctionReturn(PETSC_SUCCESS);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}

static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_ParMetis;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
  part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
  part->ops->partition      = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

  Level: intermediate

  Options Database Keys:
+ -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
. -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
. -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
- -petscpartitioner_parmetis_seed <int> - Random seed

  Note:
  When the graph resides on a single process, this partitioner calls METIS rather than ParMETIS.

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

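/*
  Illustrative usage (a sketch, not part of the upstream file): one common way to select this
  partitioner when distributing a DMPlex mesh. Here `dm` is assumed to be an existing DMPlex and
  `dmDist` receives the distributed mesh; the calls shown are standard PETSc API.

    PetscPartitioner part;
    DM               dmDist = NULL;

    PetscCall(DMPlexGetPartitioner(dm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));

  Equivalently, the type and its parameters can be chosen from the command line, e.g.

    -petscpartitioner_type parmetis -petscpartitioner_parmetis_type kway -petscpartitioner_parmetis_imbalance_ratio 1.05
*/
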
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p;

  PetscFunctionBegin;
  PetscCall(PetscNew(&p));
  part->data = p;

  PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
  p->ptype          = 0;
  p->imbalanceRatio = 1.05;
  p->debugFlag      = 0;
  p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

  PetscCall(PetscPartitionerInitialize_ParMetis(part));
  PetscCall(PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite));
  PetscFunctionReturn(PETSC_SUCCESS);
}