Actual source code: party.c
petsc-3.8.4 2018-03-24
#include <../src/mat/impls/adj/mpi/mpiadj.h>

#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif

/*
   Currently using Party-1.99
*/
EXTERN_C_BEGIN
#include <party_lib.h>
EXTERN_C_END
typedef struct {
  PetscBool redm;
  PetscBool redo;
  PetscBool recursive;
  PetscBool verbose;
  char      global[15];   /* global method */
  char      local[15];    /* local method */
  PetscInt  nbvtxcoarsed; /* number of vertices for the coarse graph */
} MatPartitioning_Party;
#define SIZE_LOG 10000 /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Party(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode        ierr;
  PetscInt              i,*parttab,*locals,nb_locals,M,N;
  PetscMPIInt           size,rank;
  Mat                   mat = part->adj,matAdj,matSeq,*A;
  Mat_MPIAdj            *adj;
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;
  PetscBool             flg;
  IS                    isrow, iscol;
  int                   n,*edge_p,*edge,*vertex_w,p,*part_party,cutsize,redl,rec;
  const char            *redm,*redo;
  char                  *mesg_log;
#if defined(PETSC_HAVE_UNISTD_H)
  int                   fd_stdout,fd_pipe[2],count,err;
#endif
  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (size>1) {
    if (flg) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Distributed matrix format MPIAdj is not supported for sequential partitioners");
    ierr = PetscInfo(part,"Converting distributed matrix to sequential: this could be a performance loss\n");CHKERRQ(ierr);
    ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iscol);CHKERRQ(ierr);
    ierr = MatCreateSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&A);CHKERRQ(ierr);
    ierr = ISDestroy(&isrow);CHKERRQ(ierr);
    ierr = ISDestroy(&iscol);CHKERRQ(ierr);
    matSeq = *A;
    ierr = PetscFree(A);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)mat);CHKERRQ(ierr);
    matSeq = mat;
  }
  if (!flg) { /* convert regular matrix to MPIADJ */
    ierr = MatConvert(matSeq,MATMPIADJ,MAT_INITIAL_MATRIX,&matAdj);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matSeq);CHKERRQ(ierr);
    matAdj = matSeq;
  }

  adj = (Mat_MPIAdj*)matAdj->data; /* finally adj contains the adjacency graph */
  /* arguments for Party library */
  n        = mat->rmap->N;             /* number of vertices in full graph */
  edge_p   = adj->i;                   /* start of edge list for each vertex */
  edge     = adj->j;                   /* edge list data */
  vertex_w = part->vertex_weights;     /* weights for all vertices */
  p        = part->n;                  /* number of parts to create */
  redl     = party->nbvtxcoarsed;      /* how many vertices to coarsen down to? */
  rec      = party->recursive ? 1 : 0; /* recursive bisection */
  redm     = party->redm ? "lam" : ""; /* matching method */
  redo     = party->redo ? "w3" : "";  /* matching optimization method */
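
  /* Illustration (editor's note, not from the original source): edge_p/edge is the
     usual CSR layout stored by MatMPIAdj. For a triangle graph with vertices 0, 1, 2
     all mutually connected:
         edge_p = {0,2,4,6}        row offsets; edge_p[n] is the total edge count
         edge   = {1,2,0,2,0,1}    neighbors of vertex 0, then 1, then 2
     so the neighbors of vertex v are edge[edge_p[v]] .. edge[edge_p[v+1]-1]. */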
  ierr = PetscMalloc1(mat->rmap->N,&part_party);CHKERRQ(ierr);
  /* redirect stdout (file descriptor 1) into a pipe so Party's output can be buffered */
#if defined(PETSC_HAVE_UNISTD_H)
  fd_stdout = dup(1);
  if (pipe(fd_pipe)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not open pipe");
  close(1);
  dup2(fd_pipe[1],1);
  ierr = PetscMalloc1(SIZE_LOG,&mesg_log);CHKERRQ(ierr);
#endif
  /* library call; the return code is checked below, after stdout is restored */
  party_lib_times_start();
  ierr = party_lib(n,vertex_w,NULL,NULL,NULL,edge_p,edge,NULL,p,part_party,&cutsize,redl,(char*)redm,(char*)redo,party->global,party->local,rec,1);

  party_lib_times_output(1);
  part_info(n,vertex_w,edge_p,edge,NULL,p,part_party,1);
#if defined(PETSC_HAVE_UNISTD_H)
  err = fflush(stdout);
  if (err) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"fflush() failed on stdout");
  count = read(fd_pipe[0],mesg_log,(SIZE_LOG-1)*sizeof(char));
  if (count<0) count = 0;
  mesg_log[count] = 0;
  close(1);
  dup2(fd_stdout,1);
  close(fd_stdout);
  close(fd_pipe[0]);
  close(fd_pipe[1]);
#endif
  /* check Party's return code before any further PETSc calls overwrite ierr */
  if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Party failed");
#if defined(PETSC_HAVE_UNISTD_H)
  if (party->verbose) {
    ierr = PetscPrintf(PetscObjectComm((PetscObject)mat),"%s",mesg_log);CHKERRQ(ierr);
  }
  ierr = PetscFree(mesg_log);CHKERRQ(ierr);
#endif
  ierr = PetscMalloc1(mat->rmap->N,&parttab);CHKERRQ(ierr);
  for (i=0; i<mat->rmap->N; i++) parttab[i] = part_party[i];

  /* creation of the index set */
  nb_locals = mat->rmap->N / size;
  locals    = parttab + rank*nb_locals;
  if (rank < mat->rmap->N % size) {
    nb_locals++;
    locals += rank;
  } else locals += mat->rmap->N % size;
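
  /* Worked example (editor's note): with N = 10 vertices and size = 3 ranks,
     N/size = 3 and N%size = 1, so rank 0 owns parttab[0..3] (4 entries, taking
     the one leftover) while ranks 1 and 2 own parttab[4..6] and parttab[7..9]
     (3 entries each): the usual PETSc slab distribution of N items. */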
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)part),nb_locals,locals,PETSC_COPY_VALUES,partitioning);CHKERRQ(ierr);

  /* clean up */
  ierr = PetscFree(parttab);CHKERRQ(ierr);
  ierr = PetscFree(part_party);CHKERRQ(ierr);
  ierr = MatDestroy(&matSeq);CHKERRQ(ierr);
  ierr = MatDestroy(&matAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode MatPartitioningView_Party(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;
  PetscErrorCode        ierr;
  PetscBool             isascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIIPrintf(viewer," Global method: %s\n",party->global);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," Local method: %s\n",party->local);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," Number of vertices for the coarse graph: %D\n",party->nbvtxcoarsed);CHKERRQ(ierr);
    if (party->redm) {
      ierr = PetscViewerASCIIPrintf(viewer," Using matching method for graph reduction\n");CHKERRQ(ierr);
    }
    if (party->redo) {
      ierr = PetscViewerASCIIPrintf(viewer," Using matching optimization\n");CHKERRQ(ierr);
    }
    if (party->recursive) {
      ierr = PetscViewerASCIIPrintf(viewer," Using recursive bipartitioning\n");CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningPartySetGlobal - Set global method for Party partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part   - the partitioning context
-  global - a string representing the method

   Options Database:
.  -mat_partitioning_party_global <method> - the global method

   Level: advanced

   Notes:
   The method may be one of MP_PARTY_OPT, MP_PARTY_LIN, MP_PARTY_SCA,
   MP_PARTY_RAN, MP_PARTY_GBF, MP_PARTY_GCF, MP_PARTY_BUB or MP_PARTY_DEF, or
   alternatively a string describing the method. Two or more methods can be
   combined like "gbf,gcf". Check the Party Library Users Manual for details.

.seealso: MatPartitioningPartySetLocal()
@*/
PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning part,const char *global)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscTryMethod(part,"MatPartitioningPartySetGlobal_C",(MatPartitioning,const char*),(part,global));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
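
/* Usage sketch (editor's note, not part of the original source): "part" is a
   hypothetical MatPartitioning previously created with MatPartitioningCreate():

     ierr = MatPartitioningSetType(part,MATPARTITIONINGPARTY);CHKERRQ(ierr);
     ierr = MatPartitioningPartySetGlobal(part,"gbf,gcf");CHKERRQ(ierr);
     ierr = MatPartitioningPartySetLocal(part,"kl");CHKERRQ(ierr);
*/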
PetscErrorCode MatPartitioningPartySetGlobal_Party(MatPartitioning part,const char *global)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  ierr = PetscStrncpy(party->global,global,15);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningPartySetLocal - Set local method for Party partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part  - the partitioning context
-  local - a string representing the method

   Options Database:
.  -mat_partitioning_party_local <method> - the local method

   Level: advanced

   Notes:
   The method may be one of MP_PARTY_HELPFUL_SETS, MP_PARTY_KERNIGHAN_LIN, or
   MP_PARTY_NONE. Check the Party Library Users Manual for details.

.seealso: MatPartitioningPartySetGlobal()
@*/
PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning part,const char *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscTryMethod(part,"MatPartitioningPartySetLocal_C",(MatPartitioning,const char*),(part,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode MatPartitioningPartySetLocal_Party(MatPartitioning part,const char *local)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  ierr = PetscStrncpy(party->local,local,15);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   MatPartitioningPartySetCoarseLevel - Set the coarse level parameter for the
   Party partitioner.

   Collective on MatPartitioning

   Input Parameters:
+  part  - the partitioning context
-  level - the coarse level in range [0.0,1.0]

   Options Database:
.  -mat_partitioning_party_coarse <l> - Coarse level

   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning part,PetscReal level)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscTryMethod(part,"MatPartitioningPartySetCoarseLevel_C",(MatPartitioning,PetscReal),(part,level));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode MatPartitioningPartySetCoarseLevel_Party(MatPartitioning part,PetscReal level)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;

  PetscFunctionBegin;
  if (level<0.0 || level>1.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Party: level of coarsening out of range [0.0-1.0]");
  party->nbvtxcoarsed = (PetscInt)(part->adj->cmap->N * level);
  if (party->nbvtxcoarsed < 20) party->nbvtxcoarsed = 20;
  PetscFunctionReturn(0);
}
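
/* Worked example (editor's note): for an adjacency matrix with N = 1000 columns,
   level = 0.5 gives nbvtxcoarsed = 500, while level = 0.01 gives 10, which is
   then clamped to the minimum of 20 coarse vertices. */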
/*@
   MatPartitioningPartySetMatchOptimization - Activate matching optimization for
   graph reduction.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  opt  - boolean flag

   Options Database:
.  -mat_partitioning_party_match_optimization - Matching optimization on/off

   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning part,PetscBool opt)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscTryMethod(part,"MatPartitioningPartySetMatchOptimization_C",(MatPartitioning,PetscBool),(part,opt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode MatPartitioningPartySetMatchOptimization_Party(MatPartitioning part,PetscBool opt)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;

  PetscFunctionBegin;
  party->redo = opt;
  PetscFunctionReturn(0);
}
/*@
   MatPartitioningPartySetBipart - Activate or deactivate recursive bisection.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  bp   - boolean flag

   Options Database:
.  -mat_partitioning_party_bipart - Bipartitioning option on/off

   Level: advanced
@*/
PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning part,PetscBool bp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part,MAT_PARTITIONING_CLASSID,1);
  ierr = PetscTryMethod(part,"MatPartitioningPartySetBipart_C",(MatPartitioning,PetscBool),(part,bp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode MatPartitioningPartySetBipart_Party(MatPartitioning part,PetscBool bp)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;

  PetscFunctionBegin;
  party->recursive = bp;
  PetscFunctionReturn(0);
}
PetscErrorCode MatPartitioningSetFromOptions_Party(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
{
  PetscErrorCode        ierr;
  PetscBool             flag;
  char                  value[256];
  PetscReal             r;
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Set Party partitioning options");CHKERRQ(ierr);
  ierr = PetscOptionsString("-mat_partitioning_party_global","Global method","MatPartitioningPartySetGlobal",party->global,value,256,&flag);CHKERRQ(ierr);
  if (flag) { ierr = MatPartitioningPartySetGlobal(part,value);CHKERRQ(ierr); }
  ierr = PetscOptionsString("-mat_partitioning_party_local","Local method","MatPartitioningPartySetLocal",party->local,value,256,&flag);CHKERRQ(ierr);
  if (flag) { ierr = MatPartitioningPartySetLocal(part,value);CHKERRQ(ierr); }
  ierr = PetscOptionsReal("-mat_partitioning_party_coarse","Coarse level","MatPartitioningPartySetCoarseLevel",0.0,&r,&flag);CHKERRQ(ierr);
  if (flag) { ierr = MatPartitioningPartySetCoarseLevel(part,r);CHKERRQ(ierr); }
  ierr = PetscOptionsBool("-mat_partitioning_party_match_optimization","Matching optimization on/off","MatPartitioningPartySetMatchOptimization",party->redo,&party->redo,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-mat_partitioning_party_bipart","Bipartitioning on/off","MatPartitioningPartySetBipart",party->recursive,&party->recursive,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-mat_partitioning_party_verbose","Show library output","",party->verbose,&party->verbose,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
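
/* Example command line (editor's note, not from the original source): the options
   registered above can be combined on any program that calls
   MatPartitioningSetFromOptions(); "app" below is a hypothetical executable:

     ./app -mat_partitioning_type party -mat_partitioning_party_global gbf,gcf \
           -mat_partitioning_party_coarse 0.5 -mat_partitioning_party_bipart 0 \
           -mat_partitioning_party_verbose
*/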
PetscErrorCode MatPartitioningDestroy_Party(MatPartitioning part)
{
  MatPartitioning_Party *party = (MatPartitioning_Party*)part->data;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  ierr = PetscFree(party);CHKERRQ(ierr);
  /* clear composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetGlobal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetLocal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetCoarseLevel_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetMatchOptimization_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetBipart_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*MC
   MATPARTITIONINGPARTY - Creates a partitioning context via the external package Party.

   Level: beginner

   Notes: See http://wwwcs.upb.de/fachbereich/AG/monien/RESEARCH/PART/party.html

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType
M*/
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Party(MatPartitioning part)
{
  PetscErrorCode        ierr;
  MatPartitioning_Party *party;

  PetscFunctionBegin;
  ierr = PetscNewLog(part,&party);CHKERRQ(ierr);
  part->data = (void*)party;

  ierr = PetscStrcpy(party->global,"gcf,gbf");CHKERRQ(ierr);
  ierr = PetscStrcpy(party->local,"kl");CHKERRQ(ierr);

  party->redm         = PETSC_TRUE;
  party->redo         = PETSC_TRUE;
  party->recursive    = PETSC_TRUE;
  party->verbose      = PETSC_FALSE;
  party->nbvtxcoarsed = 200;

  part->ops->apply          = MatPartitioningApply_Party;
  part->ops->view           = MatPartitioningView_Party;
  part->ops->destroy        = MatPartitioningDestroy_Party;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;

  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetGlobal_C",MatPartitioningPartySetGlobal_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetLocal_C",MatPartitioningPartySetLocal_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetCoarseLevel_C",MatPartitioningPartySetCoarseLevel_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetMatchOptimization_C",MatPartitioningPartySetMatchOptimization_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetBipart_C",MatPartitioningPartySetBipart_Party);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
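
/* End-to-end usage sketch (editor's note, not part of the original source).
   All names are hypothetical except the PETSc API calls; "A" is assumed to be
   an assembled square Mat whose row graph should be partitioned. Compiled out
   with #if 0 so the listing itself stays unchanged as a translation unit. */
#if 0
static PetscErrorCode ExamplePartitionWithParty(Mat A,PetscInt nparts,IS *is)
{
  PetscErrorCode  ierr;
  MatPartitioning part;

  PetscFunctionBegin;
  ierr = MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);   /* graph to partition */
  ierr = MatPartitioningSetNParts(part,nparts);CHKERRQ(ierr); /* number of parts */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGPARTY);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);   /* honor -mat_partitioning_party_* */
  ierr = MatPartitioningApply(part,is);CHKERRQ(ierr);         /* entry i of *is = part owning local row i */
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif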