Actual source code: party.c
#include <../src/mat/impls/adj/mpi/mpiadj.h>

#if defined(PETSC_HAVE_UNISTD_H)
  #include <unistd.h>
#endif

EXTERN_C_BEGIN
#include <party_lib.h>
EXTERN_C_END

typedef struct {
  PetscBool redm;
  PetscBool redo;
  PetscBool recursive;
  PetscBool verbose;
  char      global[15];   /* global method */
  char      local[15];    /* local method */
  PetscInt  nbvtxcoarsed; /* number of vertices for the coarse graph */
} MatPartitioning_Party;

#define SIZE_LOG 10000 /* size of buffer for mesg_log */

static PetscErrorCode MatPartitioningApply_Party(MatPartitioning part, IS *partitioning)
{
  int                    perr;
  PetscInt               i, *parttab, *locals, nb_locals, M, N;
  PetscMPIInt            size, rank;
  Mat                    mat = part->adj, matAdj, matSeq, *A;
  Mat_MPIAdj            *adj;
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
  PetscBool              flg;
  IS                     isrow, iscol;
  int                    n, *edge_p, *edge, *vertex_w, p, *part_party, cutsize, redl, rec;
  const char            *redm, *redo;
  char                  *mesg_log;
#if defined(PETSC_HAVE_UNISTD_H)
  int fd_stdout, fd_pipe[2], count;
#endif

  PetscFunctionBegin;
  PetscCheck(!part->use_edge_weights, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Party does not support edge weights");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  if (size > 1) {
    if (flg) {
      PetscCall(MatMPIAdjToSeq(mat, &matSeq));
    } else {
      PetscCall(PetscInfo(part, "Converting distributed matrix to sequential: this could be a performance loss\n"));
      PetscCall(MatGetSize(mat, &M, &N));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
      PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A));
      PetscCall(ISDestroy(&isrow));
      PetscCall(ISDestroy(&iscol));
      matSeq = *A;
      PetscCall(PetscFree(A));
    }
  } else {
    PetscCall(PetscObjectReference((PetscObject)mat));
    matSeq = mat;
  }

  if (!flg) { /* convert regular matrix to MPIADJ */
    PetscCall(MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matAdj));
  } else {
    PetscCall(PetscObjectReference((PetscObject)matSeq));
    matAdj = matSeq;
  }

  adj = (Mat_MPIAdj *)matAdj->data; /* finally adj contains adjacency graph */

  /* arguments for Party library */
  n        = mat->rmap->N;             /* number of vertices in full graph */
  edge_p   = adj->i;                   /* start of edge list for each vertex */
  edge     = adj->j;                   /* edge list data */
  vertex_w = part->vertex_weights;     /* weights for all vertices */
  p        = part->n;                  /* number of parts to create */
  redl     = party->nbvtxcoarsed;      /* how many vertices to coarsen down to? */
  rec      = party->recursive ? 1 : 0; /* recursive bisection */
  redm     = party->redm ? "lam" : ""; /* matching method */
  redo     = party->redo ? "w3" : "";  /* matching optimization method */

  PetscCall(PetscMalloc1(mat->rmap->N, &part_party));

  /* redirect output to buffer */
#if defined(PETSC_HAVE_UNISTD_H)
  fd_stdout = dup(1);
  PetscCheck(!pipe(fd_pipe), PETSC_COMM_SELF, PETSC_ERR_SYS, "Could not open pipe");
  close(1);
  dup2(fd_pipe[1], 1);
  PetscCall(PetscMalloc1(SIZE_LOG, &mesg_log));
#endif

  /* library call */
  party_lib_times_start();
  perr = party_lib(n, vertex_w, NULL, NULL, NULL, edge_p, edge, NULL, p, part_party, &cutsize, redl, (char *)redm, (char *)redo, party->global, party->local, rec, 1);

  party_lib_times_output(1);
  part_info(n, vertex_w, edge_p, edge, NULL, p, part_party, 1);

#if defined(PETSC_HAVE_UNISTD_H)
  PetscCall(PetscFFlush(stdout));
  count = read(fd_pipe[0], mesg_log, (SIZE_LOG - 1) * sizeof(char));
  if (count < 0) count = 0;
  mesg_log[count] = 0;
  close(1);
  dup2(fd_stdout, 1);
  close(fd_stdout);
  close(fd_pipe[0]);
  close(fd_pipe[1]);
  if (party->verbose) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "%s", mesg_log));
  PetscCall(PetscFree(mesg_log));
#endif
  PetscCheck(!perr, PETSC_COMM_SELF, PETSC_ERR_LIB, "Party failed");

  PetscCall(PetscMalloc1(mat->rmap->N, &parttab));
  for (i = 0; i < mat->rmap->N; i++) parttab[i] = part_party[i];

  /* creation of the index set */
  nb_locals = mat->rmap->n;
  locals    = parttab + mat->rmap->rstart;

  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), nb_locals, locals, PETSC_COPY_VALUES, partitioning));

  /* clean up */
  PetscCall(PetscFree(parttab));
  PetscCall(PetscFree(part_party));
  PetscCall(MatDestroy(&matSeq));
  PetscCall(MatDestroy(&matAdj));
  PetscFunctionReturn(PETSC_SUCCESS);
}
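
/*
   Note: MatPartitioningApply_Party() above captures the Party library's printf()
   output by swapping stdout for a pipe with dup()/dup2() and reading the pipe back
   into mesg_log. A minimal, hedged sketch of that same POSIX pattern in isolation
   (a hypothetical fragment, not part of this file) looks like:

     int  saved = dup(1), fds[2];
     char buf[1024];
     if (!pipe(fds)) {
       close(1);
       dup2(fds[1], 1);
       puts("captured");                            // goes into the pipe, not the terminal
       fflush(stdout);
       ssize_t n = read(fds[0], buf, sizeof(buf) - 1);
       buf[n > 0 ? n : 0] = 0;                      // buf now holds the captured text
       close(1);
       dup2(saved, 1);                              // restore the real stdout
       close(saved);
       close(fds[0]);
       close(fds[1]);
     }

   The sketch assumes a POSIX system (PETSC_HAVE_UNISTD_H); if the library wrote more
   than the pipe's capacity before the read, the writer could block, so the captured
   output is bounded (here by SIZE_LOG).
*/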

static PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
  PetscBool              isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Global method: %s\n", party->global));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Local method: %s\n", party->local));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of vertices for the coarse graph: %" PetscInt_FMT "\n", party->nbvtxcoarsed));
    if (party->redm) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using matching method for graph reduction\n"));
    if (party->redo) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using matching optimization\n"));
    if (party->recursive) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using recursive bipartitioning\n"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningPartySetGlobal - Set global method for Party partitioner.

  Collective

  Input Parameters:
+ part   - the partitioning context
- global - a string representing the method

  Options Database Key:
. -mat_partitioning_party_global <method> - the global method

  Level: advanced

  Note:
  The method may be one of `MP_PARTY_OPT`, `MP_PARTY_LIN`, `MP_PARTY_SCA`,
  `MP_PARTY_RAN`, `MP_PARTY_GBF`, `MP_PARTY_GCF`, `MP_PARTY_BUB` or `MP_PARTY_DEF`, or
  alternatively a string describing the method. Two or more methods can be
  combined like "gbf,gcf". Check the Party Library Users Manual for details.

  Developer Note:
  Should be `MatPartitioningPartySetGlobalType()` and all uses of method should be changed to type

.seealso: `MATPARTITIONINGPARTY`, `MatPartitioningPartySetLocal()`
@*/
PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning part, const char *global)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscTryMethod(part, "MatPartitioningPartySetGlobal_C", (MatPartitioning, const char *), (part, global));
  PetscFunctionReturn(PETSC_SUCCESS);
}
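
/*
   A hedged usage sketch (not from this file): once a partitioning context of type
   MATPARTITIONINGPARTY exists, the global and local strategies can be chosen either
   programmatically or from the options database. "part" below is a hypothetical,
   already-created context.

     PetscCall(MatPartitioningPartySetGlobal(part, "gbf,gcf"));   // combined global methods
     PetscCall(MatPartitioningPartySetLocal(part, "kl"));         // Kernighan-Lin refinement

   or, equivalently, on the command line:

     -mat_partitioning_party_global gbf,gcf -mat_partitioning_party_local kl
*/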

static PetscErrorCode MatPartitioningPartySetGlobal_Party(MatPartitioning part, const char *global)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscStrncpy(party->global, global, 15));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningPartySetLocal - Set local method used by the Party partitioner.

  Collective

  Input Parameters:
+ part  - the partitioning context
- local - a string representing the method

  Options Database Key:
. -mat_partitioning_party_local <method> - the local method

  Level: advanced

  Note:
  The method may be one of `MP_PARTY_HELPFUL_SETS`, `MP_PARTY_KERNIGHAN_LIN`, or
  `MP_PARTY_NONE`. Check the Party Library Users Manual for details.

  Developer Note:
  Should be `MatPartitioningPartySetLocalType()` and all uses of method should be changed to type

.seealso: `MATPARTITIONINGPARTY`, `MatPartitioningPartySetGlobal()`
@*/
PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning part, const char *local)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscTryMethod(part, "MatPartitioningPartySetLocal_C", (MatPartitioning, const char *), (part, local));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPartySetLocal_Party(MatPartitioning part, const char *local)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscStrncpy(party->local, local, 15));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningPartySetCoarseLevel - Set the coarse level parameter for the
  Party partitioner.

  Collective

  Input Parameters:
+ part  - the partitioning context
- level - the coarse level in range [0.0,1.0]

  Options Database Key:
. -mat_partitioning_party_coarse <l> - Coarse level

  Level: advanced

.seealso: `MATPARTITIONINGPARTY`
@*/
PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning part, PetscReal level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, level, 2);
  PetscTryMethod(part, "MatPartitioningPartySetCoarseLevel_C", (MatPartitioning, PetscReal), (part, level));
  PetscFunctionReturn(PETSC_SUCCESS);
}
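
/*
   For orientation (an illustrative calculation, not taken from the Party manual):
   the setter below turns the fraction "level" into an absolute coarse-graph size,
   nbvtxcoarsed = N * level, clamped from below at 20. For a graph with N = 10000
   columns, level = 0.05 requests a coarse graph of 500 vertices, while
   level = 0.001 would give 10 and be clamped up to 20.
*/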

static PetscErrorCode MatPartitioningPartySetCoarseLevel_Party(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  PetscCheck(level >= 0.0 && level <= 1.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Party: level of coarsening out of range [0.0-1.0]");
  party->nbvtxcoarsed = (PetscInt)(part->adj->cmap->N * level);
  if (party->nbvtxcoarsed < 20) party->nbvtxcoarsed = 20;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningPartySetMatchOptimization - Activate matching optimization for
  graph reduction.

  Collective

  Input Parameters:
+ part - the partitioning context
- opt  - boolean flag

  Options Database Key:
. -mat_partitioning_party_match_optimization - Matching optimization on/off

  Level: advanced

.seealso: `MATPARTITIONINGPARTY`
@*/
PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscBool opt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveBool(part, opt, 2);
  PetscTryMethod(part, "MatPartitioningPartySetMatchOptimization_C", (MatPartitioning, PetscBool), (part, opt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPartySetMatchOptimization_Party(MatPartitioning part, PetscBool opt)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  party->redo = opt;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningPartySetBipart - Activate or deactivate recursive bisection in the Party partitioner

  Collective

  Input Parameters:
+ part - the partitioning context
- bp   - boolean flag

  Options Database Key:
. -mat_partitioning_party_bipart - Bipartitioning option on/off

  Level: advanced

.seealso: `MATPARTITIONINGPARTY`
@*/
PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning part, PetscBool bp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveBool(part, bp, 2);
  PetscTryMethod(part, "MatPartitioningPartySetBipart_C", (MatPartitioning, PetscBool), (part, bp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPartySetBipart_Party(MatPartitioning part, PetscBool bp)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  party->recursive = bp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningSetFromOptions_Party(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  PetscBool              flag;
  char                   value[256];
  PetscReal              r;
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Set Party partitioning options");
  PetscCall(PetscOptionsString("-mat_partitioning_party_global", "Global method", "MatPartitioningPartySetGlobal", party->global, value, sizeof(value), &flag));
  if (flag) PetscCall(MatPartitioningPartySetGlobal(part, value));
  PetscCall(PetscOptionsString("-mat_partitioning_party_local", "Local method", "MatPartitioningPartySetLocal", party->local, value, sizeof(value), &flag));
  if (flag) PetscCall(MatPartitioningPartySetLocal(part, value));
  PetscCall(PetscOptionsReal("-mat_partitioning_party_coarse", "Coarse level", "MatPartitioningPartySetCoarseLevel", 0.0, &r, &flag));
  if (flag) PetscCall(MatPartitioningPartySetCoarseLevel(part, r));
  PetscCall(PetscOptionsBool("-mat_partitioning_party_match_optimization", "Matching optimization on/off", "MatPartitioningPartySetMatchOptimization", party->redo, &party->redo, NULL));
  PetscCall(PetscOptionsBool("-mat_partitioning_party_bipart", "Bipartitioning on/off", "MatPartitioningPartySetBipart", party->recursive, &party->recursive, NULL));
  PetscCall(PetscOptionsBool("-mat_partitioning_party_verbose", "Show library output", "", party->verbose, &party->verbose, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
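
/*
   A hedged command-line sketch (values and program name are illustrative only): with
   the options registered above, a 4-process run that selects Party for partitioning
   might look like

     mpiexec -n 4 ./app -mat_partitioning_type party \
       -mat_partitioning_party_global gcf,gbf \
       -mat_partitioning_party_coarse 0.1 \
       -mat_partitioning_party_bipart \
       -mat_partitioning_party_verbose

   where "./app" is a hypothetical application that calls MatPartitioningSetFromOptions().
*/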

static PetscErrorCode MatPartitioningDestroy_Party(MatPartitioning part)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(party));
  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetGlobal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetCoarseLevel_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetMatchOptimization_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetBipart_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATPARTITIONINGPARTY - Creates a partitioning context via the external package Party
  <http://wwwcs.upb.de/fachbereich/AG/monien/RESEARCH/PART/party.htm>.

  Level: beginner

  Note:
  Does not support the `MatPartitioningSetUseEdgeWeights()` option

.seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningPartySetGlobal()`, `MatPartitioningPartySetLocal()`,
          `MatPartitioningPartySetCoarseLevel()`, `MatPartitioningPartySetMatchOptimization()`, `MatPartitioningPartySetBipart()`
M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Party(MatPartitioning part)
{
  MatPartitioning_Party *party;

  PetscFunctionBegin;
  PetscCall(PetscNew(&party));
  part->data = (void *)party;

  PetscCall(PetscStrncpy(party->global, "gcf,gbf", sizeof(party->global)));
  PetscCall(PetscStrncpy(party->local, "kl", sizeof(party->local)));

  party->redm         = PETSC_TRUE;
  party->redo         = PETSC_TRUE;
  party->recursive    = PETSC_TRUE;
  party->verbose      = PETSC_FALSE;
  party->nbvtxcoarsed = 200;

  part->ops->apply          = MatPartitioningApply_Party;
  part->ops->view           = MatPartitioningView_Party;
  part->ops->destroy        = MatPartitioningDestroy_Party;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;

  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetGlobal_C", MatPartitioningPartySetGlobal_Party));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetLocal_C", MatPartitioningPartySetLocal_Party));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetCoarseLevel_C", MatPartitioningPartySetCoarseLevel_Party));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetMatchOptimization_C", MatPartitioningPartySetMatchOptimization_Party));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetBipart_C", MatPartitioningPartySetBipart_Party));
  PetscFunctionReturn(PETSC_SUCCESS);
}
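
/*
   A minimal, hedged usage sketch (not part of this file; "A" is a hypothetical
   adjacency matrix on PETSC_COMM_WORLD) showing how the type registered by
   MatPartitioningCreate_Party() is typically driven:

     MatPartitioning part;
     IS              is;

     PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
     PetscCall(MatPartitioningSetAdjacency(part, A));             // A: adjacency graph (e.g. MATMPIADJ)
     PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPARTY));
     PetscCall(MatPartitioningSetFromOptions(part));              // picks up -mat_partitioning_party_* options
     PetscCall(MatPartitioningApply(part, &is));                  // is[i] = target part of local row i
     PetscCall(ISDestroy(&is));
     PetscCall(MatPartitioningDestroy(&part));
*/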