/*
   Actual source code: partition.c  (petsc-3.10.5, 2019-03-28)
*/
#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;
/*
   Simplest partitioning: keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  MatGetLocalSize(part->adj,&m,NULL);
  /* every local row stays on this process: an index set of m copies of the rank (stride 0) */
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}
/*
   Partition the global row index range into near-equal contiguous blocks to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscCalloc1(nparts,&parts);
  /* distribute M rows over nparts partitions: each gets M/nparts rows, the first M%nparts get one extra */
  d = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;
  /* turn the counts into running sums so parts[i] is one past the last row of partition i */
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];
  PetscCalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
  for (i=start; i<end; i++) {
    /* locate the partition whose range contains global row i */
    PetscFindInt(i,nparts,parts,&loc);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  return(0);
}
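
/*
   Worked example (illustrative only, not part of the original source): with M = 10
   rows and nparts = 3, the loop above builds the counts {4,3,3} and the running
   sums {4,7,10}, so PetscFindInt() maps global rows 0..3 to partition 0, rows
   4..6 to partition 1, and rows 7..9 to partition 2.
*/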
static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt    cell,n,N,p,rstart,rend,*color;
  PetscMPIInt size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  /* treat the N rows as an n x n grid and assign each cell to one of the p x p square subdomains */
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return(0);
}
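
/*
   Worked example (illustrative only, not part of the original source): with part->n = 4
   (so p = 2) and N = 16 (so n = 4), the coloring formula above cuts the 4 x 4 grid into
   four 2 x 2 blocks: cells 0,1,4,5 -> 0, cells 2,3,6,7 -> 1, cells 8,9,12,13 -> 2,
   and cells 10,11,14,15 -> 3.
*/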
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}
/* Gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
     seps[   0 :         2*p) : the start and end node of each subdomain
     seps[ 2*p : 2*p+2*(p-1)) : the start and end node of each separator
     level[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough.
*/
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt l2p,i,pTree,pStartTree;

  l2p = PetscLog2Real(p);
  if (l2p - (PetscInt)PetscLog2Real(p)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%D is not a power of 2",p);
  if (!p) return(0);
  PetscMemzero(seps,(2*p-2)*sizeof(PetscInt));
  PetscMemzero(level,(p-1)*sizeof(PetscInt));
  seps[2*p-2] = sizes[2*p-2];
  pTree       = p;
  pStartTree  = 0;
  /* bottom-up pass: seps[i] accumulates the total size of the subtree rooted at node i */
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
    }
    pStartTree += pTree;
    pTree       = pTree/2;
  }
  seps[2*p-2] -= sizes[2*p-2];

  /* top-down pass: convert the subtree sizes into starting offsets and record the separator levels */
  pStartTree = 2*p-2;
  pTree      = 1;
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2*i - (pStartTree +2*pTree);
      PetscInt n = seps[k+1];

      seps[k+1]  = seps[i]   - sizes[k+1];
      seps[k]    = seps[k+1] + sizes[k+1] - n - sizes[k];
      level[i-p] = -pTree - i + pStartTree;
    }
    pTree      *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);
  for (i=2*p-2;i>=0;i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + sizes[i] - 1; }
  return(0);
}
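
/*
   Worked example (illustrative only, not part of the original source): for p = 2 and
   sizes = {5,6,3} (two subdomains of 5 and 6 nodes plus a 3-node root separator), the
   routine above returns seps = {0,4, 5,10, 11,13} and level = {-1}: subdomain 0 owns
   nodes 0..4, subdomain 1 owns nodes 5..10, and the root separator owns nodes 11..13.
*/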
/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;
/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  MatInitializePackage();
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return(0);
}
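
/*
   A minimal sketch (not part of the original source) of the creation routine passed to
   MatPartitioningRegister(). MyPartApply and MyPartCreate are hypothetical names; the
   apply callback here simply keeps every local row on its current process, mirroring
   MatPartitioningApply_Current() above.

     static PetscErrorCode MyPartApply(MatPartitioning part,IS *partitioning)
     {
       PetscInt    m;
       PetscMPIInt rank;

       MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
       MatGetLocalSize(part->adj,&m,NULL);
       ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
       return(0);
     }

     PetscErrorCode MyPartCreate(MatPartitioning part)
     {
       part->ops->apply   = MyPartApply;
       part->ops->view    = 0;
       part->ops->destroy = 0;
       return(0);
     }
*/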
/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}
/*@C
   MatPartitioningSetNParts - Sets how many partitions need to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}
/*@
   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node, a positive value indicates the processor
                  number the node has been assigned to. Negative x values indicate the separator level -(x+1).

   Level: beginner

   Notes:
   The user can define additional partitionings; see MatPartitioningRegister().

.keywords: matrix, get, partitioning

.seealso: MatPartitioningRegister(), MatPartitioningCreate(),
          MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
          ISPartitioningCount()
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
{
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->applynd) SETERRQ1(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
  PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);
  (*matp->ops->applynd)(matp,partitioning);
  PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
  return(0);
}
/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this gives the processor
                  number to which the node has been assigned.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   Notes:
   The user can define additional partitionings; see MatPartitioningRegister().

.keywords: matrix, get, partitioning

.seealso: MatPartitioningRegister(), MatPartitioningCreate(),
          MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
          ISPartitioningCount()
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
  return(0);
}
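
/*
   A typical calling sequence (sketch only, not part of the original source; error
   checking omitted). Adj is assumed to be an assembled parallel adjacency matrix,
   for example one built with MatCreateMPIAdj():

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,Adj);
     MatPartitioningSetFromOptions(part);  // e.g. -mat_partitioning_type parmetis
     MatPartitioningApply(part,&is);       // one partition number per local row
     // ... use the result, e.g. with ISPartitioningToNumbering() ...
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/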
/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the
   problem to be partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}
/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on Partitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return(0);
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}
/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights; on each process this array must have one entry per local row

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, vertex weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFree(part->vertex_weights);

  part->vertex_weights = (PetscInt*)weights;
  return(0);
}
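
/*
   Sketch (not part of the original source; error checking omitted): allocate the
   weights with PetscMalloc1() so PETSc may free them, one entry per local row of
   the adjacency matrix, then hand them over.

     PetscInt m,i,*wts;

     MatGetLocalSize(Adj,&m,NULL);
     PetscMalloc1(m,&wts);
     for (i=0; i<m; i++) wts[i] = 1;            // uniform weights as a placeholder
     MatPartitioningSetVertexWeights(part,wts); // ownership of wts passes to part
*/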
/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, partition weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFree(part->part_weights);

  part->part_weights = (PetscReal*)weights;
  return(0);
}
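
/*
   Sketch (not part of the original source; error checking omitted): give every one of
   the nparts sub-domains the same fraction of the total vertex weight, as described in
   the note above.

     PetscInt   i,nparts = 4;   // hypothetical number of partitions
     PetscReal *pwts;

     PetscMalloc1(nparts,&pwts);
     for (i=0; i<nparts; i++) pwts[i] = 1.0/nparts;  // weights sum to one
     MatPartitioningSetNParts(part,nparts);
     MatPartitioningSetPartitionWeights(part,pwts);  // ownership of pwts passes to part
*/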
/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;

  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}
/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
        output where only the first processor opens
        the file. All other processors send their
        data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer," Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}
/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode (*r)(MatPartitioning);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);

  if (part->ops->destroy) {
    (*part->ops->destroy)(part);

    part->ops->destroy = NULL;
    part->data         = 0;
    part->setupcalled  = 0;
  }

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}
/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context.

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Notes:
    If the partitioner has not been set by the user it uses one of the installed partitioners such as ParMetis. If there are
   no installed partitioners it uses "current", which means no repartitioning.

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool  flag;
  char       type[256];
  const char *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
     Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  return(0);
}
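
/*
   Command-line sketch (not part of the original source): assuming a program named
   ./app that calls MatPartitioningSetFromOptions() and MatPartitioningApply(), the
   partitioner and a view of its result can be selected at runtime with

     mpiexec -n 4 ./app -mat_partitioning_type parmetis -mat_partitioning_view

   where "parmetis" can be replaced by any registered type (use -help to list them).
*/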