Actual source code: partition.c
petsc-3.13.6 2020-09-29
2: #include <petsc/private/matimpl.h>
4: /* Logging support */
5: PetscClassId MAT_PARTITIONING_CLASSID;
7: /*
8: Simplest partitioning, keeps the current partitioning.
9: */
10: static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
11: {
13: PetscInt m;
14: PetscMPIInt rank,size;
17: MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
18: if (part->n != size) {
19: const char *prefix;
20: PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
21: SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
22: }
23: MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
25: MatGetLocalSize(part->adj,&m,NULL);
26: ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
27: return(0);
28: }
30: /*
31: partition an index to rebalance the computation
32: */
33: static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
34: {
36: PetscInt m,M,nparts,*indices,r,d,*parts,i,start,end,loc;
39: MatGetSize(part->adj,&M,NULL);
40: MatGetLocalSize(part->adj,&m,NULL);
41: nparts = part->n;
42: PetscMalloc1(nparts,&parts);
43: d = M/nparts;
44: for (i=0; i<nparts; i++) parts[i] = d;
45: r = M%nparts;
46: for (i=0; i<r; i++) parts[i] += 1;
47: for (i=1; i<nparts; i++) parts[i] += parts[i-1];
48: PetscMalloc1(m,&indices);
49: MatGetOwnershipRange(part->adj,&start,&end);
50: for (i=start; i<end; i++) {
51: PetscFindInt(i,nparts,parts,&loc);
52: if (loc<0) loc = -(loc+1);
53: else loc = loc+1;
54: indices[i-start] = loc;
55: }
56: PetscFree(parts);
57: ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
58: return(0);
59: }
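/*
   Illustrative note, not part of the original partition.c: a small trace of the "average" scheme above.
   For a global size M = 10 split into nparts = 3 parts, the prefix-sum array parts becomes {4,7,10},
   so PetscFindInt() assigns rows 0-3 to part 0, rows 4-6 to part 1, and rows 7-9 to part 2; the
   first M % nparts parts each receive one extra row.
*/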
61: static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
62: {
64: PetscInt cell,n,N,p,rstart,rend,*color;
65: PetscMPIInt size;
68: MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
69: if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
70: p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
71: if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
73: MatGetSize(part->adj,&N,NULL);
74: n = (PetscInt)PetscSqrtReal((PetscReal)N);
75: if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
76: if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
77: MatGetOwnershipRange(part->adj,&rstart,&rend);
78: PetscMalloc1(rend-rstart,&color);
79: /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
80: for (cell=rstart; cell<rend; cell++) {
81: color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
82: }
83: ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
84: return(0);
85: }
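/*
   Illustrative note, not part of the original partition.c: a small trace of the "square" scheme above.
   For N = 16 cells on a 4 x 4 grid (n = 4) and part->n = 4 processes (p = 2), the formula
   color = ((cell%n)/(n/p)) + p*((cell/n)/(n/p)) tiles the grid into 2 x 2 blocks:
   cells {0,1,4,5} -> 0, {2,3,6,7} -> 1, {8,9,12,13} -> 2, {10,11,14,15} -> 3.
*/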
87: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
88: {
90: part->ops->apply = MatPartitioningApply_Current;
91: part->ops->view = 0;
92: part->ops->destroy = 0;
93: return(0);
94: }
96: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
97: {
99: part->ops->apply = MatPartitioningApply_Average;
100: part->ops->view = 0;
101: part->ops->destroy = 0;
102: return(0);
103: }
105: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
106: {
108: part->ops->apply = MatPartitioningApply_Square;
109: part->ops->view = 0;
110: part->ops->destroy = 0;
111: return(0);
112: }
115: /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
116: seps[ 0 : 2*p) : the start and end node of each subdomain
117: seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
118: level[ 0 : p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
119: The arrays must be large enough: seps needs at least 4*p-2 entries and level needs at least p-1 entries (a worked example follows the function below)
120: */
121: PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
122: {
123: PetscInt i,pTree,pStartTree;
128: if (p & (p-1)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%D is not a power of 2",p); /* p must be a power of two */
129: if (!p) return(0);
130: PetscArrayzero(seps,2*p-2);
131: PetscArrayzero(level,p-1);
132: seps[2*p-2] = sizes[2*p-2];
133: pTree = p;
134: pStartTree = 0;
135: while (pTree != 1) {
136: for (i = pStartTree; i < pStartTree + pTree; i++) {
137: seps[i] += sizes[i];
138: seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
139: }
140: pStartTree += pTree;
141: pTree = pTree/2;
142: }
143: seps[2*p-2] -= sizes[2*p-2];
145: pStartTree = 2*p-2;
146: pTree = 1;
147: while (pStartTree > 0) {
148: for (i = pStartTree; i < pStartTree + pTree; i++) {
149: PetscInt k = 2*i - (pStartTree +2*pTree);
150: PetscInt n = seps[k+1];
152: seps[k+1] = seps[i] - sizes[k+1];
153: seps[k] = seps[k+1] + sizes[k+1] - n - sizes[k];
154: level[i-p] = -pTree - i + pStartTree;
155: }
156: pTree *= 2;
157: pStartTree -= pTree;
158: }
159: /* I know there should be a formula */
160: PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);
161: for (i=2*p-2;i>=0;i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + PetscMax(sizes[i] - 1,0); }
162: return(0);
163: }
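/*
   Illustrative note, not part of the original partition.c: a worked example of the routine above.
   For p = 2 and sizes = {4,4,2} (two subdomains of 4 nodes plus a 2-node separator, 10 nodes total),
   the routine returns seps = {0,3, 4,7, 8,9} and level = {-1}: subdomain 0 owns nodes [0,3],
   subdomain 1 owns nodes [4,7], and the root separator (level -1) owns nodes [8,9].
*/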
165: /* ===========================================================================================*/
167: PetscFunctionList MatPartitioningList = 0;
168: PetscBool MatPartitioningRegisterAllCalled = PETSC_FALSE;
171: /*@C
172: MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.
174: Not Collective
176: Input Parameters:
177: + sname - name of the partitioning type (for example, MATPARTITIONINGCURRENT or "parmetis")
178: - function - function pointer that creates the partitioning type
180: Level: developer
182: Sample usage:
183: .vb
184: MatPartitioningRegister("my_part",MyPartCreate);
185: .ve
187: Then, your partitioner can be chosen with the procedural interface via
188: $ MatPartitioningSetType(part,"my_part")
189: or at runtime via the option
190: $ -mat_partitioning_type my_part
192: .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
193: @*/
194: PetscErrorCode MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
195: {
199: MatInitializePackage();
200: PetscFunctionListAdd(&MatPartitioningList,sname,function);
201: return(0);
202: }
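/*
   Illustrative sketch, not part of the original partition.c: a minimal creation routine that could be
   passed to MatPartitioningRegister(), mirroring MatPartitioningCreate_Current() above. MyPartApply
   is a hypothetical apply routine with the same signature as MatPartitioningApply_Current().

     static PetscErrorCode MyPartCreate(MatPartitioning part)
     {
       part->ops->apply   = MyPartApply;
       part->ops->view    = NULL;
       part->ops->destroy = NULL;
       return(0);
     }

     MatPartitioningRegister("my_part",MyPartCreate);
*/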
204: /*@C
205: MatPartitioningGetType - Gets the Partitioning method type name (as a string)
206: from the partitioning context.
208: Not collective
210: Input Parameter:
211: . partitioning - the partitioning context
213: Output Parameter:
214: . type - partitioner type
216: Level: intermediate
220: @*/
221: PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
222: {
226: *type = ((PetscObject)partitioning)->type_name;
227: return(0);
228: }
230: /*@C
231: MatPartitioningSetNParts - Set how many partitions need to be created;
232: by default this is one per processor. Certain partitioning schemes may
233: in fact only support that option.
235: Not collective
237: Input Parameters:
238: + part - the partitioning context
239: - n - the number of partitions
241: Level: intermediate
245: .seealso: MatPartitioningCreate(), MatPartitioningApply()
246: @*/
247: PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
248: {
250: part->n = n;
251: return(0);
252: }
254: /*@
255: MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.
257: Collective on Mat
259: Input Parameter:
260: . matp - the matrix partitioning object
262: Output Parameter:
263: . partitioning - the partitioning. For each local node, a value greater than or equal to zero is the processor
264: number the node has been assigned to; a negative value x indicates that the node lies on separator level -(x+1).
266: Level: beginner
268: The user can define additional partitionings; see MatPartitioningRegister().
270: .seealso: MatPartitioningRegister(), MatPartitioningCreate(),
271: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
272: ISPartitioningCount()
273: @*/
274: PetscErrorCode MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
275: {
281: if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
282: if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
283: if (!matp->ops->applynd) SETERRQ1(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
284: PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);
285: (*matp->ops->applynd)(matp,partitioning);
286: PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);
288: MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
289: ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
290: return(0);
291: }
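/*
   Illustrative sketch, not part of the original partition.c: decoding one entry v of the IS returned
   by MatPartitioningApplyND(), following the description in the manual page above; owner and seplevel
   are hypothetical variables.

     if (v >= 0) owner    = v;        (node assigned to processor v)
     else        seplevel = -(v+1);   (node lies on separator level seplevel)
*/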
293: /*@
294: MatPartitioningApply - Gets a partitioning for a matrix.
296: Collective on Mat
298: Input Parameter:
299: . matp - the matrix partitioning object
301: Output Parameter:
302: . partitioning - the partitioning. For each local node this gives the processor
303: number that the node is assigned to.
305: Options Database Keys:
306: To specify the partitioning through the options database, use one of
307: the following
308: $ -mat_partitioning_type parmetis, -mat_partitioning_type current
309: To see the partitioning result
310: $ -mat_partitioning_view
312: Level: beginner
314: The user can define additional partitionings; see MatPartitioningRegister().
316: .seealso: MatPartitioningRegister(), MatPartitioningCreate(),
317: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
318: ISPartitioningCount()
319: @*/
320: PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
321: {
323: PetscBool viewbalance,improve;
328: if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
329: if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
330: if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
331: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
332: (*matp->ops->apply)(matp,partitioning);
333: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
335: MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
336: ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
338: PetscObjectOptionsBegin((PetscObject)matp);
339: viewbalance = PETSC_FALSE;
340: PetscOptionsBool("-mat_partitioning_view_imbalance","Display imbalance information of a partition",NULL,PETSC_FALSE,&viewbalance,NULL);
341: improve = PETSC_FALSE;
342: PetscOptionsBool("-mat_partitioning_improve","Improve the quality of a partition",NULL,PETSC_FALSE,&improve,NULL);
343: PetscOptionsEnd();
345: if (improve) {
346: MatPartitioningImprove(matp,partitioning);
347: }
349: if (viewbalance) {
350: MatPartitioningViewImbalance(matp,*partitioning);
351: }
352: return(0);
353: }
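/*
   Illustrative sketch, not part of the original partition.c: a typical call sequence for computing a
   partitioning, assuming A is an assembled adjacency matrix (for example created with MatCreateMPIAdj()).

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
     MatPartitioningSetAdjacency(part,A);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ... use the IS, e.g. with ISPartitioningToNumbering() or ISPartitioningCount() ...
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/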
356: /*@
357: MatPartitioningImprove - Improves the quality of a given partition.
359: Collective on Mat
361: Input Parameters:
362: + matp - the matrix partitioning object
363: - partitioning - the partitioning. For each local node this gives the processor
364: number that the node is assigned to.
366: Output Parameter:
367: . partitioning - the improved partitioning. For each local node this gives the processor
368: number that the node is assigned to.
370: Options Database Keys:
371: To improve the quality of the partition
372: $ -mat_partitioning_improve
374: Level: beginner
377: .seealso: MatPartitioningApply(), MatPartitioningCreate(),
378: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
379: ISPartitioningCount()
380: @*/
381: PetscErrorCode MatPartitioningImprove(MatPartitioning matp,IS *partitioning)
382: {
388: if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
389: if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
390: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
391: if (matp->ops->improve) {
392: (*matp->ops->improve)(matp,partitioning);
393: }
394: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
395: return(0);
396: }
398: /*@
399: MatPartitioningViewImbalance - Display partitioning imbalance information.
401: Collective on MatPartitioning
403: Input Parameters:
404: + matp - the matrix partitioning object
405: - partitioning - the partitioning. For each local node this gives the processor
406: number that the node is assigned to.
408: Options Database Keys:
409: To see the partitioning imbalance information
410: $ -mat_partitioning_view_imbalance
412: Level: beginner
414: .seealso: MatPartitioningApply(), MatPartitioningView()
415: @*/
416: PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
417: {
418: PetscErrorCode ierr;
419: PetscInt nparts,*subdomainsizes,*subdomainsizes_tmp,nlocal,i,maxsub,minsub,avgsub;
420: const PetscInt *indices;
421: PetscViewer viewer;
426: nparts = matp->n;
427: PetscCalloc2(nparts,&subdomainsizes,nparts,&subdomainsizes_tmp);
428: ISGetLocalSize(partitioning,&nlocal);
429: ISGetIndices(partitioning,&indices);
430: for (i=0;i<nlocal;i++) {
431: subdomainsizes_tmp[indices[i]] += matp->vertex_weights? matp->vertex_weights[i]:1;
432: }
433: MPI_Allreduce(subdomainsizes_tmp,subdomainsizes,nparts,MPIU_INT,MPI_SUM, PetscObjectComm((PetscObject)matp));
434: ISRestoreIndices(partitioning,&indices);
435: minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub=0;
436: for (i=0; i<nparts; i++) {
437: minsub = PetscMin(minsub,subdomainsizes[i]);
438: maxsub = PetscMax(maxsub,subdomainsizes[i]);
439: avgsub += subdomainsizes[i];
440: }
441: avgsub /=nparts;
442: PetscFree2(subdomainsizes,subdomainsizes_tmp);
443: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
444: MatPartitioningView(matp,viewer);
445: PetscViewerASCIIPrintf(viewer,"Partitioning Imbalance Info: Max %D, Min %D, Avg %D, R %g\n",maxsub, minsub, avgsub, (double)(maxsub/(PetscReal)minsub));
446: return(0);
447: }
449: /*@
450: MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
451: partitioned.
453: Collective on MatPartitioning
455: Input Parameters:
456: + part - the partitioning context
457: - adj - the adjacency matrix
459: Level: beginner
461: .seealso: MatPartitioningCreate()
462: @*/
463: PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
464: {
468: part->adj = adj;
469: return(0);
470: }
472: /*@
473: MatPartitioningDestroy - Destroys the partitioning context.
475: Collective on MatPartitioning
477: Input Parameter:
478: . part - the partitioning context
480: Level: beginner
482: .seealso: MatPartitioningCreate()
483: @*/
484: PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
485: {
489: if (!*part) return(0);
491: if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
493: if ((*part)->ops->destroy) {
494: (*(*part)->ops->destroy)((*part));
495: }
496: PetscFree((*part)->vertex_weights);
497: PetscFree((*part)->part_weights);
498: PetscHeaderDestroy(part);
499: return(0);
500: }
502: /*@C
503: MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
505: Logically Collective on MatPartitioning
507: Input Parameters:
508: + part - the partitioning context
509: - weights - the weights; on each process this array must have the same length as the number of local rows of the adjacency matrix
511: Level: beginner
513: Notes:
514: The array weights is freed by PETSc so the user should not free the array. In C/C++
515: the array must be obtained with a call to PetscMalloc(), not malloc().
517: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
518: @*/
519: PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
520: {
525: PetscFree(part->vertex_weights);
526: part->vertex_weights = (PetscInt*)weights;
527: return(0);
528: }
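/*
   Illustrative sketch, not part of the original partition.c: setting uniform vertex weights, assuming
   adj is the adjacency matrix passed to MatPartitioningSetAdjacency(). The array must be allocated
   with PetscMalloc1() because PETSc takes ownership and frees it.

     PetscInt i,m,*weights;

     MatGetLocalSize(adj,&m,NULL);
     PetscMalloc1(m,&weights);
     for (i=0; i<m; i++) weights[i] = 1;
     MatPartitioningSetVertexWeights(part,weights);    (do not free weights afterwards)
*/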
530: /*@C
531: MatPartitioningSetPartitionWeights - Sets the weights for each partition.
533: Logically Collective on MatPartitioning
535: Input Parameters:
536: + part - the partitioning context
537: - weights - An array of size nparts that is used to specify the fraction of
538: vertex weight that should be distributed to each sub-domain for
539: the balance constraint. If all of the sub-domains are to be of
540: the same size, then each of the nparts elements should be set
541: to a value of 1/nparts. Note that the sum of all of the weights
542: should be one.
544: Level: beginner
546: Notes:
547: The array weights is freed by PETSc so the user should not free the array. In C/C++
548: the array must be obtained with a call to PetscMalloc(), not malloc().
550: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
551: @*/
552: PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
553: {
558: PetscFree(part->part_weights);
559: part->part_weights = (PetscReal*)weights;
560: return(0);
561: }
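/*
   Illustrative sketch, not part of the original partition.c: requesting nparts equally sized parts,
   assuming nparts was set with MatPartitioningSetNParts(). The fractions must sum to one and the
   array must be allocated with PetscMalloc1() because PETSc takes ownership and frees it.

     PetscInt  i;
     PetscReal *fractions;

     PetscMalloc1(nparts,&fractions);
     for (i=0; i<nparts; i++) fractions[i] = 1.0/(PetscReal)nparts;
     MatPartitioningSetPartitionWeights(part,fractions);
*/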
563: /*@
564: MatPartitioningSetUseEdgeWeights - Sets a flag to indicate whether or not to use edge weights.
566: Logically Collective on MatPartitioning
568: Input Parameters:
569: + part - the partitioning context
570: - use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
571: that is, use_edge_weights is set to PETSC_FALSE. If use_edge_weights is set to PETSC_TRUE, the user must make sure valid
572: edge weights are stored in the adjacency (MATMPIADJ) matrix.
573: Level: beginner
575: Options Database Keys:
576: . -mat_partitioning_use_edge_weights - (true or false)
578: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights()
579: @*/
580: PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part,PetscBool use_edge_weights)
581: {
584: part->use_edge_weights = use_edge_weights;
585: return(0);
586: }
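/*
   Illustrative sketch, not part of the original partition.c: edge weights are read from the values
   array of the adjacency (MATMPIADJ) matrix, so they must be supplied when the graph is created.
   Here ia, ja and ewgts are hypothetical CSR arrays allocated with PetscMalloc1().

     MatCreateMPIAdj(comm,m,N,ia,ja,ewgts,&adj);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetUseEdgeWeights(part,PETSC_TRUE);
*/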
588: /*@
589: MatPartitioningGetUseEdgeWeights - Gets a flag that indicates whether or not edge weights are used.
591: Logically Collective on MatPartitioning
593: Input Parameters:
594: . part - the partitioning context
596: Output Parameters:
597: . use_edge_weights - the flag indicating whether or not edge weights are used
599: Level: beginner
601: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights(),
602: MatPartitioningSetUseEdgeWeights()
603: @*/
604: PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part,PetscBool *use_edge_weights)
605: {
609: *use_edge_weights = part->use_edge_weights;
610: return(0);
611: }
613: /*@
614: MatPartitioningCreate - Creates a partitioning context.
616: Collective
618: Input Parameter:
619: . comm - MPI communicator
621: Output Parameter:
622: . newp - location to put the context
624: Level: beginner
626: .seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
627: MatPartitioningSetAdjacency()
629: @*/
630: PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
631: {
632: MatPartitioning part;
633: PetscErrorCode ierr;
634: PetscMPIInt size;
637: *newp = 0;
639: MatInitializePackage();
640: PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
641: part->vertex_weights = NULL;
642: part->part_weights = NULL;
643: part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */
645: MPI_Comm_size(comm,&size);
646: part->n = (PetscInt)size;
648: *newp = part;
649: return(0);
650: }
652: /*@C
653: MatPartitioningViewFromOptions - Views a partitioning context based on values in the options database
655: Collective on MatPartitioning
657: Input Parameters:
658: + A - the partitioning context
659: . obj - Optional object
660: - name - command line option
662: Level: intermediate
663: .seealso: MatPartitioning, MatPartitioningView(), PetscObjectViewFromOptions(), MatPartitioningCreate()
664: @*/
665: PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A,PetscObject obj,const char name[])
666: {
671: PetscObjectViewFromOptions((PetscObject)A,obj,name);
672: return(0);
673: }
675: /*@C
676: MatPartitioningView - Prints the partitioning data structure.
678: Collective on MatPartitioning
680: Input Parameters:
681: + part - the partitioning context
682: - viewer - optional visualization context
684: Level: intermediate
686: Note:
687: The available visualization contexts include
688: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
689: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
690: output where only the first processor opens
691: the file. All other processors send their
692: data to the first processor to print.
694: The user can open alternative visualization contexts with
695: . PetscViewerASCIIOpen() - output to a specified file
697: .seealso: PetscViewerASCIIOpen()
698: @*/
699: PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
700: {
702: PetscBool iascii;
706: if (!viewer) {
707: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
708: }
712: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
713: if (iascii) {
714: PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
715: if (part->vertex_weights) {
716: PetscViewerASCIIPrintf(viewer," Using vertex weights\n");
717: }
718: }
719: if (part->ops->view) {
720: PetscViewerASCIIPushTab(viewer);
721: (*part->ops->view)(part,viewer);
722: PetscViewerASCIIPopTab(viewer);
723: }
724: return(0);
725: }
727: /*@C
728: MatPartitioningSetType - Sets the type of partitioner to use
730: Collective on MatPartitioning
732: Input Parameters:
733: + part - the partitioning context
734: - type - a known partitioning method
736: Options Database Command:
737: $ -mat_partitioning_type <type>
738: $ Use -help for a list of available methods
739: $ (for instance, parmetis)
741: Level: intermediate
743: .seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType
745: @*/
746: PetscErrorCode MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
747: {
748: PetscErrorCode ierr,(*r)(MatPartitioning);
749: PetscBool match;
755: PetscObjectTypeCompare((PetscObject)part,type,&match);
756: if (match) return(0);
758: if (part->ops->destroy) {
759: (*part->ops->destroy)(part);
760: part->ops->destroy = NULL;
761: }
762: part->setupcalled = 0;
763: part->data = 0;
764: PetscMemzero(part->ops,sizeof(struct _MatPartitioningOps));
766: PetscFunctionListFind(MatPartitioningList,type,&r);
767: if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);
769: (*r)(part);
771: PetscFree(((PetscObject)part)->type_name);
772: PetscStrallocpy(type,&((PetscObject)part)->type_name);
773: return(0);
774: }
776: /*@
777: MatPartitioningSetFromOptions - Sets various partitioning options from the
778: options database.
780: Collective on MatPartitioning
782: Input Parameter:
783: . part - the partitioning context.
785: Options Database Command:
786: $ -mat_partitioning_type <type>
787: $ Use -help for a list of available methods
788: $ (for instance, parmetis)
789: $ -mat_partitioning_nparts - number of subgraphs
792: Notes:
793: If the partitioner type has not been set by the user, it defaults to one of the installed partitioners, such as ParMetis. If no
794: external partitioners are installed it uses MATPARTITIONINGCURRENT, which means no repartitioning.
796: Level: beginner
798: @*/
799: PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
800: {
802: PetscBool flag;
803: char type[256];
804: const char *def;
807: PetscObjectOptionsBegin((PetscObject)part);
808: if (!((PetscObject)part)->type_name) {
809: #if defined(PETSC_HAVE_PARMETIS)
810: def = MATPARTITIONINGPARMETIS;
811: #elif defined(PETSC_HAVE_CHACO)
812: def = MATPARTITIONINGCHACO;
813: #elif defined(PETSC_HAVE_PARTY)
814: def = MATPARTITIONINGPARTY;
815: #elif defined(PETSC_HAVE_PTSCOTCH)
816: def = MATPARTITIONINGPTSCOTCH;
817: #else
818: def = MATPARTITIONINGCURRENT;
819: #endif
820: } else {
821: def = ((PetscObject)part)->type_name;
822: }
823: PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
824: if (flag) {
825: MatPartitioningSetType(part,type);
826: }
828: PetscOptionsInt("-mat_partitioning_nparts","number of fine parts",NULL,part->n,& part->n,&flag);
830: PetscOptionsBool("-mat_partitioning_use_edge_weights","whether or not to use edge weights",NULL,part->use_edge_weights,&part->use_edge_weights,&flag);
832: /*
833: Set the type if it was never set.
834: */
835: if (!((PetscObject)part)->type_name) {
836: MatPartitioningSetType(part,def);
837: }
839: if (part->ops->setfromoptions) {
840: (*part->ops->setfromoptions)(PetscOptionsObject,part);
841: }
842: PetscOptionsEnd();
843: return(0);
844: }
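/*
   Illustrative note, not part of the original partition.c: the options handled above can be combined
   on the command line of a hypothetical application ./app, for example

     ./app -mat_partitioning_type parmetis -mat_partitioning_nparts 8 -mat_partitioning_view
*/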