Actual source code: partptscotch.c
#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END
#endif
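
/* BibTeX citation for PT-Scotch, registered via PetscCitationsRegister() in PetscPartitionerCreate_PTScotch() below */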
PetscBool  PTScotchPartitionerCite       = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {10.1016/j.parco.2007.12.001}\n"
  "}\n";
typedef struct {
  MPI_Comm  pcomm;
  PetscInt  strategy;
  PetscReal imbalance;
} PetscPartitioner_PTScotch;

#if defined(PETSC_HAVE_PTSCOTCH)

/* Convert a nonzero Scotch return code into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error calling PT-Scotch library"); } while (0)
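
/* Translate the PetscInt strategy index (the position in PTScotchStrategyList below)
   into the corresponding SCOTCH_STRAT* flag; unknown values fall back to the default */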
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case 0: return SCOTCH_STRATDEFAULT;
  case 1: return SCOTCH_STRATQUALITY;
  case 2: return SCOTCH_STRATSPEED;
  case 3: return SCOTCH_STRATBALANCE;
  case 4: return SCOTCH_STRATSAFETY;
  case 5: return SCOTCH_STRATSCALABILITY;
  case 6: return SCOTCH_STRATRECURSIVE;
  case 7: return SCOTCH_STRATREMAP;
  default: return SCOTCH_STRATDEFAULT;
  }
}
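
/* Partition a graph that lives entirely on one process, using the serial Scotch API
   (SCOTCH_graphMap) on a complete-graph target architecture */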
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Arch    archdat;
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num    *velotab = vtxwgt;
  SCOTCH_Num    *edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* The misspelled option name below is the historical (now deprecated) spelling */
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
  if (tpart) { /* target partition weights */
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
  } else {
    ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
  }
  ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
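
/* Partition a distributed graph with the parallel PT-Scotch API (SCOTCH_dgraphMapCompute);
   vtxdist holds the vertex ownership ranges, in the same convention used by ParMETIS */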
static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
{
  PetscMPIInt     procglbnbr;
  PetscMPIInt     proclocnum;
  SCOTCH_Arch     archdat;
  SCOTCH_Dgraph   grafdat;
  SCOTCH_Dmapping mappdat;
  SCOTCH_Strat    stradat;
  SCOTCH_Num      vertlocnbr;
  SCOTCH_Num      edgelocnbr;
  SCOTCH_Num     *veloloctab = vtxwgt;
  SCOTCH_Num     *edloloctab = adjwgt;
  SCOTCH_Num      flagval    = strategy;
  double          kbalval    = imbalance;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* The misspelled option name below is the historical (now deprecated) spelling */
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) veloloctab = NULL;
  }
  ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
  vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
  edgelocnbr = xadj[vertlocnbr];

  ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
  if (tpart) { /* target partition weights */
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
  } else {
    ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
  }
  ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
  SCOTCH_dgraphMapExit(&grafdat, &mappdat);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_dgraphExit(&grafdat);
  PetscFunctionReturn(0);
}

#endif /* PETSC_HAVE_PTSCOTCH */
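
/* Option names for the strategies; the order must match the case values in PTScotch_Strategy() above */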
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};

static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
  ierr = PetscFree(part->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double) p->imbalance);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_PTScotch_ASCII(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p     = (PetscPartitioner_PTScotch *) part->data;
  const char *const         *slist = PTScotchStrategyList;
  PetscInt                   nlist = (PetscInt) (sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
  PetscBool                  flag;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
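
/* Compute a partition of the local graph vertices into nparts pieces. The graph arrives in
   distributed CSR form: start[] holds the offsets of each local vertex's edge list and
   adjacency[] the concatenated edge lists (renamed xadj/adjncy below, following the usual
   CSR naming) */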
static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  MPI_Comm       comm;
  PetscInt       nvtxs  = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;              /* Distribution of vertices across processes */
  PetscInt      *xadj   = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt   = NULL;        /* Vertex weights */
  PetscInt      *adjwgt = NULL;        /* Edge weights */
  PetscInt       v, i, *assignment, *points;
  PetscMPIInt    size, rank, p;
  PetscBool      hasempty = PETSC_FALSE;
  PetscInt      *tpwgts   = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscMalloc2(size+1, &vtxdist, PetscMax(nvtxs,1), &assignment);CHKERRQ(ierr);
  /* Calculate vertex distribution: vtxdist[p] is the global number of the first vertex owned by rank p */
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool) (hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
  /* null graph: return an empty partition */
  if (vtxdist[size] == 0) {
    ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* Calculate vertex weights */
  if (vertSection) {
    ierr = PetscMalloc1(nvtxs, &vwgt);CHKERRQ(ierr);
    for (v = 0; v < nvtxs; ++v) {
      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
    }
  }

  /* Calculate partition weights */
  if (targetSection) {
    PetscInt sumw;

    ierr = PetscCalloc1(nparts, &tpwgts);CHKERRQ(ierr);
    for (p = 0, sumw = 0; p < nparts; ++p) {
      ierr = PetscSectionGetDof(targetSection, p, &tpwgts[p]);CHKERRQ(ierr);
      sumw += tpwgts[p];
    }
    /* If all target weights are zero, fall back to unweighted partitioning */
    if (!sumw) {ierr = PetscFree(tpwgts);CHKERRQ(ierr);}
  }

  {
    PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *) part->data;
    int                        strat = PTScotch_Strategy(pts->strategy);
    double                     imbal = (double) pts->imbalance;

    /* Find the first rank that owns any vertices */
    for (p = 0; p < size && !vtxdist[p+1]; ++p);
    if (vtxdist[p+1] == vtxdist[size]) {
      /* The whole graph lives on rank p: use the sequential Scotch path */
      if (rank == p) {
        ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
      }
    } else {
      MPI_Comm pcomm = pts->pcomm;

      if (hasempty) {
        /* Exclude ranks that own no vertices from the PT-Scotch communicator and compress vtxdist accordingly */
        PetscInt cnt;

        ierr = MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm);CHKERRQ(ierr);
        for (p = 0, cnt = 0; p < size; p++) {
          if (vtxdist[p+1] != vtxdist[p]) {
            vtxdist[cnt+1] = vtxdist[p+1];
            cnt++;
          }
        }
      }
      if (nvtxs) {
        ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
      }
      if (hasempty) {
        ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscFree(vwgt);CHKERRQ(ierr);
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);

  ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
#endif
}
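
/* noGraph = PETSC_FALSE records that this partitioner consumes the connectivity graph
   (start/adjacency), unlike partitioners that assign points from counts alone */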
static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_PTScotch;
  part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
  part->ops->partition      = PetscPartitionerPartition_PTScotch;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

  Level: intermediate

  Options Database Keys:
+  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
-  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio

  Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
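
/* Example (hypothetical driver name): any program that calls PetscPartitionerSetFromOptions()
   can select and tune this partitioner from the command line, e.g.

       ./ex_mesh -petscpartitioner_type ptscotch \
                 -petscpartitioner_ptscotch_strategy quality \
                 -petscpartitioner_ptscotch_imbalance 0.05
*/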

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = MPI_Comm_dup(PetscObjectComm((PetscObject) part), &p->pcomm);CHKERRQ(ierr);
  p->strategy  = 0;
  p->imbalance = 0.01;

  ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}