/*  pmetis.c  --  ParMETIS interface for MatPartitioning  (petsc-3.10.5, 2019-03-28)  */
#include <../src/mat/impls/adj/mpi/mpiadj.h>

/*
   Currently using ParMetis-4.0.2
*/

#include <parmetis.h>

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  PetscInt  cuts;        /* number of cuts made (output) */
  PetscInt  foldfactor;
  PetscInt  parallel;    /* use parallel partitioner for coarse problem */
  PetscInt  indexing;    /* 0 indicates C indexing, 1 Fortran */
  PetscInt  printout;    /* indicates if one wishes Metis to print info */
  PetscBool repartition;
} MatPartitioning_Parmetis;
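
/*
   Error handling for direct calls into ParMETIS: CHKERRQPARMETIS translates the
   METIS_ERROR_* return codes into PETSc errors, and PetscStackCallParmetis pushes
   the ParMETIS routine onto the PETSc stack so it shows up in error tracebacks.
*/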
#define CHKERRQPARMETIS(n,func)                                             \
  if (n == METIS_ERROR_INPUT) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to wrong inputs and/or options for %s",func); \
  else if (n == METIS_ERROR_MEMORY) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS error due to insufficient memory in %s",func); \
  else if (n == METIS_ERROR) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"ParMETIS general error in %s",func);

#define PetscStackCallParmetis(func,args) do {PetscStackPush(#func);status = func args;PetscStackPop; CHKERRQPARMETIS(status,#func);} while (0)
static PetscErrorCode MatPartitioningApply_Parmetis_Private(MatPartitioning part, PetscBool useND, IS *partitioning)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  PetscInt                 *locals = NULL;
  Mat                      mat     = part->adj,amat,pmat;
  PetscBool                flg;
  PetscInt                 bs = 1;

  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (flg) {
    amat = mat;
    PetscObjectReference((PetscObject)amat);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&amat);
    if (amat->rmap->n > 0) bs = mat->rmap->n/amat->rmap->n;
  }
  MatMPIAdjCreateNonemptySubcommMat(amat,&pmat);
  MPI_Barrier(PetscObjectComm((PetscObject)part));
  if (pmat) {
    MPI_Comm   pcomm,comm;
    Mat_MPIAdj *adj     = (Mat_MPIAdj*)pmat->data;
    PetscInt   *vtxdist = pmat->rmap->range;
    PetscInt   *xadj    = adj->i;
    PetscInt   *adjncy  = adj->j;
    PetscInt   *NDorder = NULL;
    PetscInt   itmp     = 0,wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[24], i, j;
    real_t     *tpwgts,*ubvec,itr=0.1;
    int        status;

    PetscObjectGetComm((PetscObject)pmat,&pcomm);
#if defined(PETSC_USE_DEBUG)
    /* check that matrix has no diagonal entries */
    {
      PetscInt rstart;
      MatGetOwnershipRange(pmat,&rstart,NULL);
      for (i=0; i<pmat->rmap->n; i++) {
        for (j=xadj[i]; j<xadj[i+1]; j++) {
          if (adjncy[j] == i+rstart) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row %D has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
        }
      }
    }
#endif

    PetscMalloc1(pmat->rmap->n,&locals);
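
    /* ParMETIS wgtflag values: 0 = no weights, 1 = edge weights only,
       2 = vertex weights only, 3 = both vertex and edge weights */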
    if (adj->values && !part->vertex_weights) wgtflag = 1;
    if (part->vertex_weights && !adj->values) wgtflag = 2;
    if (part->vertex_weights && adj->values)  wgtflag = 3;

    if (PetscLogPrintInfo) {itmp = pmetis->printout; pmetis->printout = 127;}
    PetscMalloc1(ncon*nparts,&tpwgts);
    for (i=0; i<ncon; i++) {
      for (j=0; j<nparts; j++) {
        if (part->part_weights) {
          tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
        } else {
          tpwgts[i*nparts+j] = 1./nparts;
        }
      }
    }
    PetscMalloc1(ncon,&ubvec);
    for (i=0; i<ncon; i++) {
      ubvec[i] = 1.05;
    }
    /* This sets the defaults */
    options[0] = 0;
    for (i=1; i<24; i++) {
      options[i] = -1;
    }
    /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
    MPI_Comm_dup(pcomm,&comm);
    if (useND) {
      PetscInt    *sizes, *seps, log2size, subd, *level;
      PetscMPIInt size;
      idx_t       mtype = PARMETIS_MTYPE_GLOBAL, rtype = PARMETIS_SRTYPE_2PHASE, p_nseps = 1, s_nseps = 1;
      real_t      ubfrac = 1.05;
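
      /* Nested-dissection controls passed to ParMETIS_V32_NodeND (see the ParMETIS manual):
         mtype selects the matching scheme, rtype the separator refinement scheme,
         p_nseps/s_nseps the number of separators tried at each parallel/sequential level,
         and ubfrac the allowed imbalance of the two halves of each bisection. */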
      MPI_Comm_size(comm,&size);
      PetscMalloc1(pmat->rmap->n,&NDorder);
      PetscMalloc3(2*size,&sizes,4*size,&seps,size,&level);
      PetscStackCallParmetis(ParMETIS_V32_NodeND,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)&numflag,&mtype,&rtype,&p_nseps,&s_nseps,&ubfrac,NULL/* seed */,NULL/* dbglvl */,(idx_t*)NDorder,(idx_t*)(sizes),&comm));
      log2size = PetscLog2Real(size);
      subd     = PetscPowInt(2,log2size);
      MatPartitioningSizesToSep_Private(subd,sizes,seps,level);
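      /* Assign each local row to a partition: the first 2*subd entries of seps hold the
         [start,end) index ranges of the leaf subdomains of the nested dissection, the
         remaining entries hold the separator ranges.  A row whose ND index lies inside a
         subdomain range gets that subdomain; rows on a separator are assigned through the
         level[] array computed by MatPartitioningSizesToSep_Private(). */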
      for (i=0;i<pmat->rmap->n;i++) {
        PetscInt loc;

        PetscFindInt(NDorder[i],2*subd,seps,&loc);
        if (loc < 0) {
          loc = -(loc+1);
          if (loc%2) { /* part of subdomain */
            locals[i] = loc/2;
          } else {
            PetscFindInt(NDorder[i],2*(subd-1),seps+2*subd,&loc);
            loc       = loc < 0 ? -(loc+1)/2 : loc/2;
            locals[i] = level[loc];
          }
        } else locals[i] = loc/2;
      }
      PetscFree3(sizes,seps,level);
    } else {
      if (pmetis->repartition) {
        PetscStackCallParmetis(ParMETIS_V3_AdaptiveRepart,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)part->vertex_weights,(idx_t*)adj->values,(idx_t*)&wgtflag,(idx_t*)&numflag,(idx_t*)&ncon,(idx_t*)&nparts,tpwgts,ubvec,&itr,(idx_t*)options,(idx_t*)&pmetis->cuts,(idx_t*)locals,&comm));
      } else {
        PetscStackCallParmetis(ParMETIS_V3_PartKway,((idx_t*)vtxdist,(idx_t*)xadj,(idx_t*)adjncy,(idx_t*)part->vertex_weights,(idx_t*)adj->values,(idx_t*)&wgtflag,(idx_t*)&numflag,(idx_t*)&ncon,(idx_t*)&nparts,tpwgts,ubvec,(idx_t*)options,(idx_t*)&pmetis->cuts,(idx_t*)locals,&comm));
      }
    }
    MPI_Comm_free(&comm);

    PetscFree(tpwgts);
    PetscFree(ubvec);
    if (PetscLogPrintInfo) pmetis->printout = itmp;

    if (bs > 1) {
      PetscInt i,j,*newlocals;
      PetscMalloc1(bs*pmat->rmap->n,&newlocals);
      for (i=0; i<pmat->rmap->n; i++) {
        for (j=0; j<bs; j++) {
          newlocals[bs*i + j] = locals[i];
        }
      }
      PetscFree(locals);
      ISCreateGeneral(PetscObjectComm((PetscObject)part),bs*pmat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);
    } else {
      ISCreateGeneral(PetscObjectComm((PetscObject)part),pmat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);
    }
    if (useND) {
      IS ndis;

      if (bs > 1) {
        ISCreateBlock(PetscObjectComm((PetscObject)part),bs,pmat->rmap->n,NDorder,PETSC_OWN_POINTER,&ndis);
      } else {
        ISCreateGeneral(PetscObjectComm((PetscObject)part),pmat->rmap->n,NDorder,PETSC_OWN_POINTER,&ndis);
      }
      ISSetPermutation(ndis);
      PetscObjectCompose((PetscObject)(*partitioning),"_petsc_matpartitioning_ndorder",(PetscObject)ndis);
      ISDestroy(&ndis);
    }
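    /* A caller can retrieve the nested-dissection ordering attached above, e.g. (sketch):
         IS nd = NULL;
         PetscObjectQuery((PetscObject)*partitioning,"_petsc_matpartitioning_ndorder",(PetscObject*)&nd);
       PetscObjectQuery() returns a borrowed reference, so nd must not be destroyed by the caller. */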
  } else {
    ISCreateGeneral(PetscObjectComm((PetscObject)part),0,NULL,PETSC_COPY_VALUES,partitioning);
    if (useND) {
      IS ndis;

      if (bs > 1) {
        ISCreateBlock(PetscObjectComm((PetscObject)part),bs,0,NULL,PETSC_COPY_VALUES,&ndis);
      } else {
        ISCreateGeneral(PetscObjectComm((PetscObject)part),0,NULL,PETSC_COPY_VALUES,&ndis);
      }
      ISSetPermutation(ndis);
      PetscObjectCompose((PetscObject)(*partitioning),"_petsc_matpartitioning_ndorder",(PetscObject)ndis);
      ISDestroy(&ndis);
    }
  }
  MatDestroy(&pmat);
  MatDestroy(&amat);
  return(0);
}

/*
   Uses the ParMETIS parallel matrix partitioner to compute a nested dissection ordering of the matrix in parallel
*/
static PetscErrorCode MatPartitioningApplyND_Parmetis(MatPartitioning part, IS *partitioning)
{
  MatPartitioningApply_Parmetis_Private(part, PETSC_TRUE, partitioning);
  return(0);
}

/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part, IS *partitioning)
{
  MatPartitioningApply_Parmetis_Private(part, PETSC_FALSE, partitioning);
  return(0);
}

PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  int                      rank;
  PetscBool                iascii;

  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    if (pmetis->parallel == 2) {
      PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");
    } else {
      PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");
    }
    PetscViewerASCIIPrintf(viewer,"  Using %D fold factor\n",pmetis->foldfactor);
    PetscViewerASCIIPushSynchronized(viewer);
    PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] Number of cuts found %D\n",rank,pmetis->cuts);
    PetscViewerFlush(viewer);
    PetscViewerASCIIPopSynchronized(viewer);
  }
  return(0);
}

/*@
   MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
      do the partitioning of the coarse grid.

   Logically Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;

  pmetis->parallel = 1;
  return(0);
}

/*@
   MatPartitioningParmetisSetRepartition - Repartition
      current mesh to rebalance computation.

   Logically Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode MatPartitioningParmetisSetRepartition(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;

  pmetis->repartition = PETSC_TRUE;
  return(0);
}

/*@
   MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  cut - the edge cut

   Level: advanced

@*/
PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*) part->data;

  *cut = pmetis->cuts;
  return(0);
}
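
/* Typical use (a sketch): query the cut after the partitioning has been applied, e.g.
     MatPartitioningApply(part,&is);
     MatPartitioningParmetisGetEdgeCut(part,&cut);
     PetscPrintf(PetscObjectComm((PetscObject)part),"ParMETIS edge cut: %D\n",cut);
   where is is an IS and cut a PetscInt declared by the caller. */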

PetscErrorCode MatPartitioningSetFromOptions_Parmetis(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
{
  PetscBool flag = PETSC_FALSE;

  PetscOptionsHead(PetscOptionsObject,"Set ParMETIS partitioning options");
  PetscOptionsBool("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",flag,&flag,NULL);
  if (flag) {
    MatPartitioningParmetisSetCoarseSequential(part);
  }
  flag = PETSC_FALSE; /* reset so the previous option does not become the default for the next one */
  PetscOptionsBool("-mat_partitioning_parmetis_repartition","Repartition current mesh to rebalance computation","MatPartitioningParmetisSetRepartition",flag,&flag,NULL);
  if (flag) {
    MatPartitioningParmetisSetRepartition(part);
  }
  PetscOptionsTail();
  return(0);
}

PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;

  PetscFree(pmetis);
  return(0);
}

/*MC
   MATPARTITIONINGPARMETIS - Creates a partitioning context via the external package PARMETIS.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_parmetis_coarse_sequential - use the sequential PARMETIS coarse partitioner
-  -mat_partitioning_parmetis_repartition - repartition the current mesh to rebalance the computation

   Level: beginner

   Notes:
    See http://www-users.cs.umn.edu/~karypis/metis/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/
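
/* Typical use of this partitioning type (a sketch; adj is an existing MATMPIADJ
   adjacency matrix supplied by the caller):
     MatPartitioning part;
     IS              is;
     MatPartitioningCreate(PetscObjectComm((PetscObject)adj),&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
   The options database keys listed above take effect in MatPartitioningSetFromOptions(). */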

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  PetscErrorCode           ierr;
  MatPartitioning_Parmetis *pmetis;

  PetscNewLog(part,&pmetis);
  part->data = (void*)pmetis;

  pmetis->cuts        = 0;           /* output variable */
  pmetis->foldfactor  = 150;         /* folding factor */
  pmetis->parallel    = 2;           /* use parallel partitioner for coarse grid */
  pmetis->indexing    = 0;           /* index numbering starts from 0 */
  pmetis->printout    = 0;           /* print no output while running */
  pmetis->repartition = PETSC_FALSE;

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->applynd        = MatPartitioningApplyND_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  return(0);
}

/*@
     MatMeshToVertexGraph - This routine is not functional because ParMETIS does not provide the needed functionality. It was intended
       to use the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
       between vertices of the cells, suitable for partitioning with the MatPartitioning object. Use this to partition
       vertices of a mesh. More likely you should use MatMeshToCellGraph()

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is
     the number of cells, the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToCellGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

@*/
PetscErrorCode MatMeshToVertexGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"ParMETIS does not provide this functionality");
  return(0);
}

/*@
     MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
       between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
       cells of a mesh.

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

$     Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, quadrilaterals 4 entries,
$         tetrahedra 4 entries and hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
$         mix tetrahedra and hexahedra
$     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell.
$     The number of rows in mesh is the number of cells, the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

@*/
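
/* Usage sketch (assumes a single process and a hypothetical 4-vertex mesh made of two
   triangles that share an edge; the arrays are PetscMalloc'd because MatCreateMPIAdj()
   takes ownership of them and frees them when the matrix is destroyed):
     PetscInt *ii,*jj;
     Mat      mesh,dual;
     PetscMalloc1(3,&ii);
     PetscMalloc1(6,&jj);
     ii[0] = 0; ii[1] = 3; ii[2] = 6;                   // 2 cells, 3 vertices each
     jj[0] = 0; jj[1] = 1; jj[2] = 2;                   // vertices of cell 0
     jj[3] = 1; jj[4] = 2; jj[5] = 3;                   // vertices of cell 1
     MatCreateMPIAdj(PETSC_COMM_SELF,2,4,ii,jj,NULL,&mesh);
     MatMeshToCellGraph(mesh,2,&dual);                  // triangles sharing 2 vertices are neighbors
   The resulting dual has one row per cell listing its neighboring cells. */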

PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscInt   *newxadj,*newadjncy;
  PetscInt   numflag=0;
  Mat_MPIAdj *adj = (Mat_MPIAdj*)mesh->data,*newadj;
  PetscBool  flg;
  int        status;
  MPI_Comm   comm;

  PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  PetscObjectGetComm((PetscObject)mesh,&comm);
  PetscStackCallParmetis(ParMETIS_V3_Mesh2Dual,((idx_t*)mesh->rmap->range,(idx_t*)adj->i,(idx_t*)adj->j,(idx_t*)&numflag,(idx_t*)&ncommonnodes,(idx_t**)&newxadj,(idx_t**)&newadjncy,&comm));
  MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh),mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,NULL,dual);
  newadj = (Mat_MPIAdj*)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  return(0);
}