Actual source code: partition.c
petsc-3.8.4 2018-03-24
#include <petsc/private/matimpl.h>
/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;
/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  /* each process keeps its local rows: an IS of length m whose entries all equal rank */
  MatGetLocalSize(part->adj,&m,NULL);
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}
/*
   Average partitioning: assigns an (almost) equal number of consecutive rows
   to each partition to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscCalloc1(nparts,&parts);
  /* distribute the M rows as evenly as possible: each partition gets M/nparts rows,
     and the first M%nparts partitions get one extra row */
  d = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;
  /* turn the counts into cumulative offsets: parts[i] is the first global row of partition i+1 */
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];
  PetscCalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
  for (i=start; i<end; i++) {
    /* binary search for row i among the offsets; a negative result encodes the insertion point */
    PetscFindInt(i,nparts,parts,&loc);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  return(0);
}
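
/*
   Worked example of the Average scheme (illustrative, not part of the original source):
   with M = 10 rows and nparts = 3, the counts are {4,3,3}, so the cumulative
   offsets become parts = {4,7,10}. PetscFindInt() then maps rows 0-3 to
   partition 0, rows 4-6 to partition 1, and rows 7-9 to partition 2.
*/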
static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt    cell,n,N,p,rstart,rend,*color;
  PetscMPIInt size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  for (cell=rstart; cell<rend; cell++) {
    /* treat cell as the point (cell%n, cell/n) on an n x n grid and color it by its block in the p x p block decomposition */
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return(0);
}
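
/*
   Worked example of the Square scheme (illustrative, not part of the original source):
   with N = 16 (n = 4) and 4 processes (p = 2), the 4 x 4 grid is cut into 2 x 2
   blocks of 2 x 2 cells. Cell 6 sits at (x,y) = (6%4, 6/4) = (2,1), so its color is
   (2 / 2) + 2 * (1 / 2) = 1.
*/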
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}
/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;
/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning type (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return(0);
}
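
/*
   A minimal sketch of the creation function referenced in the sample usage above.
   MyPartCreate and MatPartitioningApply_My are hypothetical names, not part of PETSc;
   the apply function here just keeps the existing row distribution, like the Current scheme:

   static PetscErrorCode MatPartitioningApply_My(MatPartitioning part,IS *partitioning)
   {
     PetscInt    m;
     PetscMPIInt rank;

     MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
     MatGetLocalSize(part->adj,&m,NULL);
     ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
     return(0);
   }

   PetscErrorCode MyPartCreate(MatPartitioning part)
   {
     part->ops->apply = MatPartitioningApply_My;
     return(0);
   }
*/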
/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}
/*@C
   MatPartitioningSetNParts - Sets how many partitions are to be created;
   by default this is one per processor. Certain partitioning schemes may
   in fact support only that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}
/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning; for each local node it gives the processor
                  number to which that node is assigned

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().

.keywords: matrix, get, partitioning

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscBool flag = PETSC_FALSE;

  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  PetscOptionsGetBool(((PetscObject)matp)->options,NULL,"-mat_partitioning_view",&flag,NULL);
  if (flag) {
    PetscViewer viewer;
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
    MatPartitioningView(matp,viewer);
    ISView(*partitioning,viewer);
  }
  return(0);
}
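
/*
   Typical calling sequence (a sketch, assuming a previously assembled adjacency
   matrix A; the variable names are illustrative, not from this file):

   MatPartitioning part;
   IS              is;

   MatPartitioningCreate(PETSC_COMM_WORLD,&part);
   MatPartitioningSetAdjacency(part,A);
   MatPartitioningSetFromOptions(part);
   MatPartitioningApply(part,&is);
   ISView(is,PETSC_VIEWER_STDOUT_WORLD);
   ISDestroy(&is);
   MatPartitioningDestroy(&part);
*/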
/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
      partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}
/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return(0);
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}
/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, vertex, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFree(part->vertex_weights);
  part->vertex_weights = (PetscInt*)weights;
  return(0);
}
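
/*
   Usage sketch (illustrative; assumes part has already been created and its
   adjacency matrix has m local rows):

   PetscInt i,*w;

   PetscMalloc1(m,&w);              // must be PetscMalloc'd, since PETSc takes ownership
   for (i=0; i<m; i++) w[i] = 1;    // e.g. uniform weights
   MatPartitioningSetVertexWeights(part,w);
   // do not free w; it is released by MatPartitioningDestroy()
*/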
/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, partition, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFree(part->part_weights);
  part->part_weights = (PetscReal*)weights;
  return(0);
}
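
/*
   Usage sketch for equal-sized partitions (illustrative; nparts would normally
   come from MatPartitioningSetNParts() or default to the communicator size):

   PetscInt  i;
   PetscReal *pw;

   PetscMalloc1(nparts,&pw);
   for (i=0; i<nparts; i++) pw[i] = 1.0/nparts;   // weights sum to one
   MatPartitioningSetPartitionWeights(part,pw);   // PETSc now owns pw
*/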
/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.   comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;

  /* default to one partition per process */
  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}
/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file. All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}
/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context.
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode (*r)(MatPartitioning);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);

  /* if a type was already set up, tear it down before switching */
  if (part->setupcalled) {
    (*part->ops->destroy)(part);
    part->ops->destroy = NULL;
    part->data         = 0;
    part->setupcalled  = 0;
  }

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}
/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context.

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Notes: If the partitioner has not been set by the user it uses one of the installed partitioners such as ParMetis. If there are
   no installed partitioners it defaults to current, which means no repartitioning.

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool  flag;
  char       type[256];
  const char *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
    /* pick the default type from whichever external partitioner is installed */
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  return(0);
}