Actual source code: partition.c

petsc-3.12.5 2020-03-29

#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt       m;
  PetscMPIInt    rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  MatGetLocalSize(part->adj,&m,NULL);
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}

/*
   Partition so that each of the n parts receives an (almost) equal number of vertices,
   to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt       m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscMalloc1(nparts,&parts);
  d      = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;               /* spread the remainder over the first r parts */
  for (i=1; i<nparts; i++) parts[i] += parts[i-1]; /* turn part sizes into cumulative offsets */
  PetscMalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
  for (i=start; i<end; i++) {
    /* binary-search the cumulative offsets for the part that owns global row i */
    PetscFindInt(i,nparts,parts,&loc);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  return(0);
}
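
/*
   Worked example (editor's sketch, not part of the original source): with M = 10
   global rows and nparts = 3, the cumulative offsets become parts = {4,7,10}, so
   rows 0-3 map to part 0, rows 4-6 to part 1, and rows 7-9 to part 2.
*/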

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n    = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  for (cell=rstart; cell<rend; cell++) {
    /* tile the n x n grid with p x p square blocks of size (n/p) x (n/p) */
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return(0);
}
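
/*
   Worked example (editor's sketch, not part of the original source): for N = 16
   (a 4 x 4 grid, so n = 4) and part->n = 4 (p = 2), cells are colored by quadrant:
   cells {0,1,4,5} -> 0, {2,3,6,7} -> 1, {8,9,12,13} -> 2, {10,11,14,15} -> 3.
*/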

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}


/* Gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :         2*p) : the start and end node of each subdomain
       seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
     levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough: seps needs 4*p-2 entries and level needs p-1 entries.
*/
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt       l2p,i,pTree,pStartTree;

  if (!p) return(0);
  l2p = (PetscInt)PetscLog2Real(p);
  if (PetscLog2Real(p) != (PetscReal)l2p) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%D is not a power of 2",p);
  PetscArrayzero(seps,2*p-2);
  PetscArrayzero(level,p-1);
  seps[2*p-2] = sizes[2*p-2];
  pTree = p;
  pStartTree = 0;
  /* bottom-up pass: accumulate the node count of every subtree into seps[] */
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree/2;
  }
  seps[2*p-2] -= sizes[2*p-2];

  pStartTree = 2*p-2;
  pTree      = 1;
  /* top-down pass: turn subtree counts into starting offsets and record each separator's level */
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2*i - (pStartTree +2*pTree);
      PetscInt n = seps[k+1];

      seps[k+1]  = seps[i]   - sizes[k+1];
      seps[k]    = seps[k+1] + sizes[k+1] - n - sizes[k];
      level[i-p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);
  /* expand each (start,count) pair into an inclusive (start,end) range */
  for (i=2*p-2;i>=0;i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + sizes[i] - 1; }
  return(0);
}
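
/*
   Worked example (editor's sketch, not part of the original source): for p = 2 with
   sizes = {3,3,1} (two subdomains of 3 nodes plus a root separator of 1 node, 7 nodes
   total), the routine returns seps = {0,2, 3,5, 6,6}, i.e. subdomain 0 spans nodes
   [0,2], subdomain 1 spans [3,5], the separator is node [6,6], and level = {-1}.
*/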

/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;


/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning type (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  MatInitializePackage();
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return(0);
}
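
/*
   A minimal sketch (editor's addition, not part of the original source) of what a
   registered creation function might look like; MyPartCreate and MyPartApply are
   hypothetical names. Following the pattern of MatPartitioningCreate_Current() above,
   the creation routine only has to fill in the ops table:

     static PetscErrorCode MyPartApply(MatPartitioning part,IS *partitioning);

     PetscErrorCode MyPartCreate(MatPartitioning part)
     {
       part->ops->apply   = MyPartApply;
       part->ops->view    = 0;
       part->ops->destroy = 0;
       return(0);
     }
*/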

/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}

/*@C
   MatPartitioningSetNParts - Set how many partitions need to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}

/*@
   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.   partitioning - the partitioning. For each local node, a positive value indicates the processor
                   number the node has been assigned to. Negative x values indicate the separator level -(x+1).

   Level: beginner

   Note:
   The user can define additional partitionings; see MatPartitioningRegister().

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
{
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->applynd) SETERRQ1(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
  PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);
  (*matp->ops->applynd)(matp,partitioning);
  PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
  return(0);
}
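
/*
   Example (editor's sketch, not part of the original source) of decoding the IS
   returned by MatPartitioningApplyND: a value v >= 0 is the owning processor, while
   v < 0 marks a separator node at level -(v+1), so v = -1 is a level-0 (root)
   separator, v = -2 a level-1 separator, and so on.
*/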

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.   partitioning - the partitioning. For each local node this tells the processor
                   number that that node is assigned to.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   Note:
   The user can define additional partitionings; see MatPartitioningRegister().

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscBool      viewbalance,improve;

  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscOptionsBool("-mat_partitioning_view_imbalance","Display imbalance information of a partition",NULL,PETSC_FALSE,&viewbalance,NULL);
  improve = PETSC_FALSE;
  PetscOptionsBool("-mat_partitioning_improve","Improve the quality of a partition",NULL,PETSC_FALSE,&improve,NULL);
  PetscOptionsEnd();

  if (improve) {
    MatPartitioningImprove(matp,partitioning);
  }

  if (viewbalance) {
    MatPartitioningViewImbalance(matp,*partitioning);
  }
  return(0);
}
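
/*
   Typical usage (editor's sketch, not part of the original source), assuming a
   parallel adjacency matrix A is already assembled:

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,A);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ... use the partitioning, e.g. with ISPartitioningToNumbering() ...
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/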


/*@
   MatPartitioningImprove - Improves the quality of a given partition.

   Collective on Mat

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the partitioning. For each local node this tells the processor
                   number that that node is assigned to.

   Output Parameter:
.   partitioning - the partitioning. For each local node this tells the processor
                   number that that node is assigned to.

   Options Database Keys:
   To improve the quality of the partition
$    -mat_partitioning_improve

   Level: beginner

.seealso:  MatPartitioningApply(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningImprove(MatPartitioning matp,IS *partitioning)
{
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  if (matp->ops->improve) {
    (*matp->ops->improve)(matp,partitioning);
  }
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
  return(0);
}

/*@
   MatPartitioningViewImbalance - Display partitioning imbalance information.

   Collective on MatPartitioning

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the partitioning. For each local node this tells the processor
                   number that that node is assigned to.

   Options Database Keys:
   To see the partitioning imbalance information
$    -mat_partitioning_view_imbalance

   Level: beginner

.seealso:  MatPartitioningApply(), MatPartitioningView()
@*/
PetscErrorCode  MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscErrorCode  ierr;
  PetscInt        nparts,*subdomainsizes,*subdomainsizes_tmp,nlocal,i,maxsub,minsub,avgsub;
  const PetscInt  *indices;
  PetscViewer     viewer;

  nparts = matp->n;
  PetscCalloc2(nparts,&subdomainsizes,nparts,&subdomainsizes_tmp);
  ISGetLocalSize(partitioning,&nlocal);
  ISGetIndices(partitioning,&indices);
  for (i=0;i<nlocal;i++) {
    subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  }
  MPI_Allreduce(subdomainsizes_tmp,subdomainsizes,nparts,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)matp));
  ISRestoreIndices(partitioning,&indices);
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i=0; i<nparts; i++) {
    minsub = PetscMin(minsub,subdomainsizes[i]);
    maxsub = PetscMax(maxsub,subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscFree2(subdomainsizes,subdomainsizes_tmp);
  PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
  MatPartitioningView(matp,viewer);
  PetscViewerASCIIPrintf(viewer,"Partitioning Imbalance Info: Max %D, Min %D, Avg %D, R %g\n",maxsub,minsub,avgsub,(double)(maxsub/(PetscReal)minsub));
  return(0);
}

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return(0);
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFree(part->vertex_weights);
  part->vertex_weights = (PetscInt*)weights;
  return(0);
}
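
/*
   A minimal sketch (editor's addition, not part of the original source) of supplying
   uniform vertex weights for a matrix A; the array must come from PetscMalloc1()
   since PETSc takes ownership and frees it:

     PetscInt *weights;
     PetscInt m,i;

     MatGetLocalSize(A,&m,NULL);
     PetscMalloc1(m,&weights);
     for (i=0; i<m; i++) weights[i] = 1;
     MatPartitioningSetVertexWeights(part,weights);
*/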

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFree(part->part_weights);
  part->part_weights = (PetscReal*)weights;
  return(0);
}
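
/*
   A minimal sketch (editor's addition, not part of the original source) of requesting
   equally sized sub-domains; the entries sum to one as the documentation requires:

     PetscReal *pweights;
     PetscInt  i,nparts = part->n;

     PetscMalloc1(nparts,&pweights);
     for (i=0; i<nparts; i++) pweights[i] = 1.0/nparts;
     MatPartitioningSetPartitionWeights(part,pweights);
*/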

/*@
   MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights will be used,
                      that is, use_edge_weights is set to FALSE. If use_edge_weights is set to TRUE, the user needs to
                      make sure legal edge weights are stored in an ADJ matrix.

   Level: beginner

   Options Database Keys:
.  -mat_partitioning_use_edge_weights - (true or false)

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetUseEdgeWeights(MatPartitioning part,PetscBool use_edge_weights)
{
  part->use_edge_weights = use_edge_weights;
  return(0);
}

/*@
   MatPartitioningGetUseEdgeWeights - Get the flag that indicates whether or not edge weights are used.

   Logically Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  use_edge_weights - the flag indicating whether or not edge weights are used

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights(),
          MatPartitioningSetUseEdgeWeights()
@*/
PetscErrorCode  MatPartitioningGetUseEdgeWeights(MatPartitioning part,PetscBool *use_edge_weights)
{
  *use_edge_weights = part->use_edge_weights;
  return(0);
}

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective

   Input Parameter:
.   comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscErrorCode  ierr;
  PetscMPIInt     size;

  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool      iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode ierr,(*r)(MatPartitioning);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);

  /* destroy the old implementation and reset the ops table before switching types */
  if (part->ops->destroy) {
    (*part->ops->destroy)(part);
    part->ops->destroy = NULL;
  }
  part->setupcalled = 0;
  part->data        = 0;
  PetscMemzero(part->ops,sizeof(struct _MatPartitioningOps));

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context.

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
$  -mat_partitioning_nparts - number of subgraphs

   Notes:
    If the partitioner has not been set by the user it uses one of the installed partitioners, such as ParMetis.
   If there are no installed partitioners it defaults to MATPARTITIONINGCURRENT, which means no repartitioning.

   Level: beginner

@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool      flag;
  char           type[256];
  const char     *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }

  PetscOptionsInt("-mat_partitioning_nparts","number of fine parts",NULL,part->n,&part->n,&flag);

  PetscOptionsBool("-mat_partitioning_use_edge_weights","whether or not to use edge weights",NULL,part->use_edge_weights,&part->use_edge_weights,&flag);

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  return(0);
}
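
/*
   Command-line usage (editor's sketch, not part of the original source): once a
   program calls MatPartitioningSetFromOptions(), the partitioner and part count can
   be selected at runtime, e.g.

     ./myapp -mat_partitioning_type parmetis -mat_partitioning_nparts 8 -mat_partitioning_view

   where myapp is a placeholder for the user's executable.
*/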