Actual source code: partition.c
petsc-3.7.3 2016-08-01
#include <petsc/private/matimpl.h>               /*I "petscmat.h" I*/

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;
/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  MatGetLocalSize(part->adj,&m,NULL);
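  /* a stride IS of length m with first element rank and step 0 contains m
     copies of rank, assigning every locally owned row to this process */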
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}
/*
   Partitions the rows evenly into contiguous blocks, one per part, to
   rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscCalloc1(nparts,&parts);
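  /* each part gets M/nparts rows, with the first M%nparts parts taking one
     extra row so that all M rows are covered */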
  d = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;
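  /* turn the per-part counts into cumulative upper bounds: after this loop,
     rows [parts[i-1],parts[i]) belong to part i, with parts[-1] read as 0 */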
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];
  PetscCalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
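  /* locate each locally owned row in the bounds array by binary search;
     PetscFindInt() returns -(insertion point)-1 when the key is absent, which
     is exactly the owning part, while an exact hit on bound loc means the row
     is the first row of part loc+1 */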
  for (i=start; i<end; i++) {
    PetscFindInt(i,nparts,parts,&loc);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  return(0);
}
static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt    cell,n,N,p,rstart,rend,*color;
  PetscMPIInt size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
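  /* view the N = n*n rows as an n x n grid of cells and the part->n = p*p
     parts as a p x p grid of subdomains of (n/p) x (n/p) cells each:
     (cell%n, cell/n) are the cell coordinates, and integer division by n/p
     selects the subdomain column and row */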
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return(0);
}
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}
/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;
/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return(0);
}
/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}
/*@C
   MatPartitioningSetNParts - Sets how many partitions are to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support one partition per processor.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}
/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this tells the processor
                  number that the node is assigned to.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner
   The user can define additional partitionings; see MatPartitioningRegister().
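   A minimal usage sketch (error checking omitted; A is assumed to be an
   already assembled adjacency matrix, e.g. created with MatCreateMPIAdj()):
.vb
   MatPartitioning part;
   IS              is;
   MatPartitioningCreate(PETSC_COMM_WORLD,&part);
   MatPartitioningSetAdjacency(part,A);
   MatPartitioningSetFromOptions(part);
   MatPartitioningApply(part,&is);
   /* ... use the partitioning, e.g. with ISPartitioningToNumbering() ... */
   ISDestroy(&is);
   MatPartitioningDestroy(&part);
.ve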
.keywords: matrix, get, partitioning

.seealso: MatPartitioningRegister(), MatPartitioningCreate(),
          MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
          ISPartitioningCount()
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscBool flag = PETSC_FALSE;

  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
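  /* hand off to the type-specific implementation, timed by the MAT_Partitioning event */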
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  PetscOptionsGetBool(((PetscObject)matp)->options,NULL,"-mat_partitioning_view",&flag,NULL);
  if (flag) {
    PetscViewer viewer;
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
    MatPartitioningView(matp,viewer);
    ISView(*partitioning,viewer);
  }
  return(0);
}
/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}
/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on Partitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return(0);
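  /* the context is reference counted; only tear it down once the last
     reference has been released */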
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}
/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
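   A minimal sketch setting uniform weights (error checking omitted; adj is
   assumed to be the adjacency matrix passed to MatPartitioningSetAdjacency()):
.vb
   PetscInt i,nlocal,*weights;
   MatGetLocalSize(adj,&nlocal,NULL);
   PetscMalloc1(nlocal,&weights);
   for (i=0; i<nlocal; i++) weights[i] = 1;
   MatPartitioningSetVertexWeights(part,weights);  /* part now owns weights */
.ve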
.keywords: Partitioning, vertex, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFree(part->vertex_weights);

  part->vertex_weights = (PetscInt*)weights;
  return(0);
}
/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
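   A minimal sketch requesting equal-sized parts (error checking omitted;
   nparts is assumed to be the number of partitions set on the context):
.vb
   PetscInt  i;
   PetscReal *weights;
   PetscMalloc1(nparts,&weights);
   for (i=0; i<nparts; i++) weights[i] = 1.0/nparts;
   MatPartitioningSetPartitionWeights(part,weights);  /* part now owns weights */
.ve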
.keywords: Partitioning, partition, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFree(part->part_weights);

  part->part_weights = (PetscReal*)weights;
  return(0);
}
/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;

  MPI_Comm_size(comm,&size);
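  /* default to one part per process in the communicator */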
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}
/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file. All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}
/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
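   For example, to select the trivial keep-current-distribution partitioner
   programmatically (a minimal sketch; error checking omitted):
.vb
   MatPartitioningSetType(part,MATPARTITIONINGCURRENT);
.ve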
   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode (*r)(MatPartitioning);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);
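  /* the type is changing: destroy any state created by the previous
     implementation before the new constructor installs its own ops */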
  if (part->setupcalled) {
    (*part->ops->destroy)(part);

    part->ops->destroy = NULL;
    part->data         = 0;
    part->setupcalled  = 0;
  }

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}
/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context.

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool  flag;
  char       type[256];
  const char *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  return(0);
}