Actual source code: party.c
#include "src/mat/impls/adj/mpi/mpiadj.h"
#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"
/*
   Currently using Party-1.99
*/
#include "party_lib.h"
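
/*
   Example usage (a minimal sketch, assuming PETSc was built with Party
   support and `adj` is an assembled adjacency matrix on PETSC_COMM_WORLD;
   error checking omitted for brevity):

       MatPartitioning part;
       IS              is;

       MatPartitioningCreate(PETSC_COMM_WORLD, &part);
       MatPartitioningSetAdjacency(part, adj);
       MatPartitioningSetType(part, MAT_PARTITIONING_PARTY);
       MatPartitioningSetFromOptions(part);
       MatPartitioningApply(part, &is);
       ISDestroy(is);
       MatPartitioningDestroy(part);
*/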
typedef struct {
    char redm[15];              /* matching method for graph reduction */
    char redo[15];              /* matching optimization ("w3" when active) */
    int rec;                    /* use recursive bisection */
    int output;                 /* turn on library output */
    char global_method[15];     /* global partitioning method */
    char local_method[15];      /* local partitioning method */
    int nbvtxcoarsed;           /* number of vertices for the coarse graph */
    char *mesg_log;             /* buffer capturing the library's output */
} MatPartitioning_Party;
#define SIZE_LOG 10000          /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Party(MatPartitioning part, IS *partitioning)
{
    PetscErrorCode ierr;
    int *locals, *parttab = NULL, rank, size;
    Mat mat = part->adj, matMPI, matSeq;
    int nb_locals;
    Mat_MPIAdj *adj = (Mat_MPIAdj *) mat->data;
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
    PetscTruth flg;
#ifdef PETSC_HAVE_UNISTD_H
    int fd_stdout, fd_pipe[2], count;
#endif
    /* check if the matrix is sequential; use MatGetSubMatrices if necessary */
    PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
    MPI_Comm_size(mat->comm, &size);
    MPI_Comm_rank(part->comm, &rank);
    if (size > 1) {
        int M, N;
        IS isrow, iscol;
        Mat *A;

        if (flg) {
            SETERRQ(PETSC_ERR_SUP,
                "Distributed matrix format MPIAdj is not supported for sequential partitioners");
        }
        ierr = PetscPrintf(part->comm,
            "Converting distributed matrix to sequential: this could be a performance loss\n");CHKERRQ(ierr);
        MatGetSize(mat, &M, &N);
        ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
        ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
        MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
        ISDestroy(isrow);
        ISDestroy(iscol);
        matSeq = *A;
    } else
        matSeq = mat;
    /* the input format is supported only for the MPIADJ type, so convert
       if necessary and keep the result in matMPI */
    if (!flg) {
        MatConvert(matSeq, MATMPIADJ, &matMPI);
    } else
        matMPI = matSeq;

    adj = (Mat_MPIAdj *) matMPI->data;  /* finally adj contains the adjacency graph */
    {
        /* Party library arguments definition */
        int n = mat->M;                 /* number of vertices in full graph */
        int *edge_p = adj->i;           /* start of edge list for each vertex */
        int *edge = adj->j;             /* edge list data */
        int *vertex_w = NULL;           /* weights for all vertices */
        int *edge_w = NULL;             /* weights for all edges */
        float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
        int p = part->n;                /* number of parts to create */
        int *part_party;                /* partition number of each vertex (length n) */
        int cutsize;                    /* number of edges cut */
        char *global = party->global_method; /* global partitioning algorithm */
        char *local = party->local_method;   /* local partitioning algorithm */
        int redl = party->nbvtxcoarsed; /* how many vertices to coarsen down to */
        char *redm = party->redm;       /* matching method for graph reduction */
        char *redo = party->redo;       /* matching optimization for graph reduction */
        int rec = party->rec;           /* use recursive bisection */
        int output = party->output;     /* turn on library output */
        PetscMalloc((mat->M) * sizeof(int), &part_party);

        /* redirect output to buffer party->mesg_log */
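        /* The Party library writes its diagnostics to stdout. Save the
           current stdout descriptor, then point descriptor 1 at the write end
           of a pipe for the duration of the library call; afterwards the
           captured text is read back into party->mesg_log and the original
           stdout is restored. */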
#ifdef PETSC_HAVE_UNISTD_H
        fd_stdout = dup(1);
        pipe(fd_pipe);
        close(1);
        dup2(fd_pipe[1], 1);
        PetscMalloc(SIZE_LOG * sizeof(char), &(party->mesg_log));
#endif
        /* library call */
        party_lib_times_start();
        ierr = party_lib(n, vertex_w, x, y, z, edge_p, edge, edge_w,
                         p, part_party, &cutsize, redl, redm, redo,
                         global, local, rec, output);

        party_lib_times_output(output);
        part_info(n, vertex_w, edge_p, edge, edge_w, p, part_party, output);
#ifdef PETSC_HAVE_UNISTD_H
        fflush(stdout);
        count = read(fd_pipe[0], party->mesg_log, (SIZE_LOG - 1) * sizeof(char));
        if (count < 0)
            count = 0;
        party->mesg_log[count] = 0;
        close(1);
        dup2(fd_stdout, 1);
        close(fd_stdout);
        close(fd_pipe[0]);
        close(fd_pipe[1]);
#endif
        /* report any error returned by the library call */
        if (ierr) {
            SETERRQ(PETSC_ERR_LIB, party->mesg_log);
        }
        parttab = part_party;
    }

    /* Creation of the index set */
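    /* Each process takes a contiguous block of parttab: floor(M/size)
       entries, with the first M % size processes getting one extra. The
       pointer arithmetic below places locals at offset
       rank*floor(M/size) + min(rank, M % size). */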
    MPI_Comm_rank(part->comm, &rank);
    MPI_Comm_size(part->comm, &size);
    nb_locals = mat->M / size;
    locals = parttab + rank * nb_locals;
    if (rank < mat->M % size) {
        nb_locals++;
        locals += rank;
    } else
        locals += mat->M % size;
    ISCreateGeneral(part->comm, nb_locals, locals, partitioning);
    /* free the partition array and destroy any temporary matrices */
    PetscFree(parttab);
    if (matSeq != mat) {
        MatDestroy(matSeq);
    }
    if (matMPI != mat) {
        MatDestroy(matMPI);
    }

    return(0);
}
PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewer viewer)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
    PetscErrorCode ierr;
    PetscMPIInt rank;
    PetscTruth iascii;

    MPI_Comm_rank(part->comm, &rank);
    PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
    if (iascii) {
        if (!rank && party->mesg_log) {
            PetscViewerASCIIPrintf(viewer, "%s\n", party->mesg_log);
        }
    } else {
        SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Party partitioner",
                 ((PetscObject) viewer)->type_name);
    }
    return(0);
}
/*@C
   MatPartitioningPartySetGlobal - Set the method for global partitioning.

   Input Parameters:
+  part - the partitioning context
-  global - may be one of MP_PARTY_OPT, MP_PARTY_LIN, MP_PARTY_SCA,
   MP_PARTY_RAN, MP_PARTY_GBF, MP_PARTY_GCF, MP_PARTY_BUB or MP_PARTY_DEF, or
   alternatively a string describing the method. Two or more methods can be
   combined, as in "gbf,gcf". Check the Party Library Users Manual for details.
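
   Options Database Key:
.  -mat_partitioning_party_global <method> - global partitioning method to use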
   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning part, const char *global)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscStrcpy(party->global_method, global);
    return(0);
}
/*@C
   MatPartitioningPartySetLocal - Set the method for local partitioning.

   Input Parameters:
+  part - the partitioning context
-  local - one of MP_PARTY_HELPFUL_SETS, MP_PARTY_KERNIGHAN_LIN, or MP_PARTY_NONE.
   Check the Party Library Users Manual for details.
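
   Options Database Key:
.  -mat_partitioning_party_local <method> - local partitioning method to use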
   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning part, const char *local)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscStrcpy(party->local_method, local);
    return(0);
}
/*@
   MatPartitioningPartySetCoarseLevel - Set the coarse level.

   Input Parameters:
+  part - the partitioning context
-  level - the coarse level in the range [0.0,1.0]
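
   Options Database Key:
.  -mat_partitioning_party_coarse_level <level> - coarse level in the range [0.0,1.0]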
   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning part, PetscReal level)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (level < 0.0 || level > 1.0) {
        SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,
            "Party: level of coarsening out of range [0.0,1.0]");
    } else
        party->nbvtxcoarsed = (int)(part->adj->N * level);

    if (party->nbvtxcoarsed < 20)
        party->nbvtxcoarsed = 20;

    return(0);
}
/*@
   MatPartitioningPartySetMatchOptimization - Activate matching optimization for graph reduction.

   Input Parameters:
+  part - the partitioning context
-  opt - PETSC_TRUE to activate the optimization
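
   Options Database Key:
.  -mat_partitioning_party_match_optimization <bool> - matching optimization on/off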
   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscTruth opt)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (opt)
        PetscStrcpy(party->redo, "w3");
    else
        PetscStrcpy(party->redo, "");
    return(0);
}
/*@
   MatPartitioningPartySetBipart - Activate or deactivate recursive bisection.

   Input Parameters:
+  part - the partitioning context
-  bp - PETSC_TRUE to activate recursive bisection
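
   Options Database Key:
.  -mat_partitioning_party_bipart <bool> - recursive bisection on/off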
   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning part, PetscTruth bp)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (bp)
        party->rec = 1;
    else
        party->rec = 0;
    return(0);
}
PetscErrorCode MatPartitioningSetFromOptions_Party(MatPartitioning part)
{
    PetscTruth flag, b;
    char value[15];
    PetscReal r;

    PetscOptionsHead("Set Party partitioning options");

    PetscOptionsString("-mat_partitioning_party_global",
        "Global method to use", "MatPartitioningPartySetGlobal", "gcf,gbf",
        value, 15, &flag);
    if (flag)
        MatPartitioningPartySetGlobal(part, value);

    PetscOptionsString("-mat_partitioning_party_local",
        "Local method to use", "MatPartitioningPartySetLocal", "kl", value, 15,
        &flag);
    if (flag)
        MatPartitioningPartySetLocal(part, value);

    PetscOptionsReal("-mat_partitioning_party_coarse_level",
        "Coarse level", "MatPartitioningPartySetCoarseLevel", 0, &r,
        &flag);
    if (flag)
        MatPartitioningPartySetCoarseLevel(part, r);

    PetscOptionsLogical("-mat_partitioning_party_match_optimization",
        "Matching optimization on/off (boolean)",
        "MatPartitioningPartySetMatchOptimization", PETSC_TRUE, &b, &flag);
    if (flag)
        MatPartitioningPartySetMatchOptimization(part, b);

    PetscOptionsLogical("-mat_partitioning_party_bipart",
        "Bipartitioning option on/off (boolean)",
        "MatPartitioningPartySetBipart", PETSC_TRUE, &b, &flag);
    if (flag)
        MatPartitioningPartySetBipart(part, b);

    PetscOptionsTail();
    return(0);
}
PetscErrorCode MatPartitioningDestroy_Party(MatPartitioning part)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (party->mesg_log) {
        PetscFree(party->mesg_log);
    }
    PetscFree(party);
    return(0);
}
PetscErrorCode MatPartitioningCreate_Party(MatPartitioning part)
{
    MatPartitioning_Party *party;

    PetscNew(MatPartitioning_Party, &party);

    PetscStrcpy(party->global_method, "gcf,gbf");
    PetscStrcpy(party->local_method, "kl");
    PetscStrcpy(party->redm, "lam");
    PetscStrcpy(party->redo, "w3");
    party->nbvtxcoarsed = 200;
    party->rec = 1;
    party->output = 1;
    party->mesg_log = NULL;

    part->ops->apply = MatPartitioningApply_Party;
    part->ops->view = MatPartitioningView_Party;
    part->ops->destroy = MatPartitioningDestroy_Party;
    part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;
    part->data = (void *) party;
    return(0);
}