Actual source code: partition.c


#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt       m;
  PetscMPIInt    rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  MatGetLocalSize(part->adj,&m,NULL);
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return 0;
}

/*
   Partitions the rows into part->n consecutive, nearly equal-sized chunks
   to rebalance the computation.
*/
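/* Illustrative example (not part of the original source): with M = 10 rows and
   nparts = 3, the target sizes are {4,3,3}; after the prefix sum parts = {4,7,10},
   so rows 0..3 go to part 0, rows 4..6 to part 1, and rows 7..9 to part 2. */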
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt       m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscMalloc1(nparts,&parts);
  /* target size of each part: M/nparts, with the remainder spread one row at a time */
  d      = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;
  /* turn the part sizes into cumulative end offsets (a prefix sum) */
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];
  PetscMalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
  for (i=start; i<end; i++) {
    /* binary search for the part whose range contains global row i */
    PetscFindInt(i,nparts,parts,&loc);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  return 0;
}

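/* Illustrative example (not part of the original source): for a 4x4 grid
   (N = 16, so n = 4) split among 4 processes (p = 2, n/p = 2), the coloring below
   assigns rows 0-1, columns 0-1 to part 0; rows 0-1, columns 2-3 to part 1;
   rows 2-3, columns 0-1 to part 2; and rows 2-3, columns 2-3 to part 3. */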
static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n    = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  /* assign each cell to the (n/p x n/p) block of the n x n grid that contains it */
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return 0;
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  return 0;
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  return 0;
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  return 0;
}

/* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :         2*p) : the start and end node of each subdomain
       seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
     levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough
*/
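/* Illustrative example (not part of the original source): for p = 2 and
   sizes = {3,4,2} (two subdomains with 3 and 4 nodes, one separator with 2),
   the routine returns
     seps  = {0,2, 3,6, 7,8}   subdomain 0 spans [0,2], subdomain 1 spans [3,6],
                               and the separator spans [7,8]
     level = {-1}              the single separator is the root */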
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt       l2p,i,pTree,pStartTree;

  if (!p) return 0;
  l2p = PetscLog2Real(p);
  if (p != ((PetscInt)1 << l2p)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%" PetscInt_FMT " is not a power of 2",p);
  PetscArrayzero(seps,2*p-2);
  PetscArrayzero(level,p-1);
  seps[2*p-2] = sizes[2*p-2];
  pTree = p;
  pStartTree = 0;
  /* bottom-up sweep: accumulate subtree sizes level by level */
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree/2;
  }
  seps[2*p-2] -= sizes[2*p-2];

  pStartTree = 2*p-2;
  pTree      = 1;
  /* top-down sweep: convert the subtree sizes into starting offsets and record separator levels */
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2*i - (pStartTree +2*pTree);
      PetscInt n = seps[k+1];

      seps[k+1]  = seps[i]   - sizes[k+1];
      seps[k]    = seps[k+1] + sizes[k+1] - n - sizes[k];
      level[i-p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);
  /* expand each starting offset into an inclusive [start,end] range */
  for (i=2*p-2; i>=0; i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + PetscMax(sizes[i] - 1,0); }
  return 0;
}

/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning type (for example, MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  MatInitializePackage();
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return 0;
}

/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return 0;
}

/*@C
   MatPartitioningSetNParts - Set how many partitions need to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return 0;
}

/*@
   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.   partitioning - the partitioning. For each local node, a positive value indicates the processor
                   number the node has been assigned to. A negative value x indicates that the node
                   is part of a separator of level -(x+1) (so -1 marks the top-level separator, -2
                   the next level, and so on).

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
{
  if (!matp->ops->applynd) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
  PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);
  (*matp->ops->applynd)(matp,partitioning);
  PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
  return 0;
}

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.   partitioning - the partitioning. For each local node this tells the processor
                   number that the node is assigned to.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().
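
   Example usage (a sketch, not from the original source; A stands for an already assembled adjacency matrix and error checking is omitted):
.vb
   MatPartitioning part;
   IS              is;

   MatPartitioningCreate(PETSC_COMM_WORLD,&part);
   MatPartitioningSetAdjacency(part,A);
   MatPartitioningSetFromOptions(part);
   MatPartitioningApply(part,&is);
   /* ... use the IS, for example with ISPartitioningToNumbering() ... */
   ISDestroy(&is);
   MatPartitioningDestroy(&part);
.ve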

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscBool      viewbalance,improve;

  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscOptionsBool("-mat_partitioning_view_imbalance","Display imbalance information of a partition",NULL,PETSC_FALSE,&viewbalance,NULL);
  improve = PETSC_FALSE;
  PetscOptionsBool("-mat_partitioning_improve","Improve the quality of a partition",NULL,PETSC_FALSE,&improve,NULL);
  PetscOptionsEnd();

  if (improve) {
    MatPartitioningImprove(matp,partitioning);
  }

  if (viewbalance) {
    MatPartitioningViewImbalance(matp,*partitioning);
  }
  return 0;
}

/*@
   MatPartitioningImprove - Improves the quality of a given partition.

   Collective on Mat

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the original partitioning. For each local node this tells the processor
                   number that the node is assigned to.

   Output Parameter:
.   partitioning - the improved partitioning. For each local node this tells the processor
                   number that the node is assigned to.

   Options Database Keys:
   To improve the quality of the partition
$    -mat_partitioning_improve

   Level: beginner

.seealso:  MatPartitioningApply(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningImprove(MatPartitioning matp,IS *partitioning)
{
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  if (matp->ops->improve) (*matp->ops->improve)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
  return 0;
}

/*@
   MatPartitioningViewImbalance - Display partitioning imbalance information.

   Collective on MatPartitioning

   Input Parameters:
+  matp - the matrix partitioning object
-  partitioning - the partitioning. For each local node this tells the processor
                   number that the node is assigned to.

   Options Database Keys:
   To see the partitioning imbalance information
$    -mat_partitioning_view_imbalance

   Level: beginner

.seealso:  MatPartitioningApply(), MatPartitioningView()
@*/
PetscErrorCode  MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts,*subdomainsizes,*subdomainsizes_tmp,nlocal,i,maxsub,minsub,avgsub;
  const PetscInt  *indices;
  PetscViewer     viewer;

  nparts = matp->n;
  PetscCalloc2(nparts,&subdomainsizes,nparts,&subdomainsizes_tmp);
  ISGetLocalSize(partitioning,&nlocal);
  ISGetIndices(partitioning,&indices);
  /* accumulate the (weighted) size of each subdomain, locally then globally */
  for (i=0; i<nlocal; i++) {
    subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  }
  MPI_Allreduce(subdomainsizes_tmp,subdomainsizes,nparts,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)matp));
  ISRestoreIndices(partitioning,&indices);
  minsub = PETSC_MAX_INT; maxsub = PETSC_MIN_INT; avgsub = 0;
  for (i=0; i<nparts; i++) {
    minsub = PetscMin(minsub,subdomainsizes[i]);
    maxsub = PetscMax(maxsub,subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscFree2(subdomainsizes,subdomainsizes_tmp);
  PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
  MatPartitioningView(matp,viewer);
  PetscViewerASCIIPrintf(viewer,"Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", Max/Min ratio %g\n",maxsub,minsub,avgsub,(double)(maxsub/(PetscReal)minsub));
  return 0;
}

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return 0;
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return 0;
  if (--((PetscObject)(*part))->refct > 0) {*part = NULL; return 0;}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return 0;
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
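
   For example (a sketch, not from the original source; nlocal stands for the number of local rows of the adjacency matrix):
.vb
   PetscInt *weights,i;

   PetscMalloc1(nlocal,&weights);            /* ownership of the array passes to PETSc */
   for (i=0; i<nlocal; i++) weights[i] = 1;  /* e.g. uniform weights */
   MatPartitioningSetVertexWeights(part,weights);
.ve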

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFree(part->vertex_weights);
  part->vertex_weights = (PetscInt*)weights;
  return 0;
}

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
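
   For example (a sketch, not from the original source; nparts stands for the number of partitions):
.vb
   PetscReal *weights;
   PetscInt  i;

   PetscMalloc1(nparts,&weights);                      /* ownership of the array passes to PETSc */
   for (i=0; i<nparts; i++) weights[i] = 1.0/nparts;   /* equal-sized sub-domains */
   MatPartitioningSetPartitionWeights(part,weights);
.ve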

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFree(part->part_weights);
  part->part_weights = (PetscReal*)weights;
  return 0;
}

/*@
   MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
                      that is, use_edge_weights is set to PETSC_FALSE. If use_edge_weights is set to PETSC_TRUE,
                      the user needs to make sure legal edge weights are stored in an ADJ matrix.

   Level: beginner

   Options Database Keys:
.  -mat_partitioning_use_edge_weights - (true or false)

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetUseEdgeWeights(MatPartitioning part,PetscBool use_edge_weights)
{
  part->use_edge_weights = use_edge_weights;
  return 0;
}

/*@
   MatPartitioningGetUseEdgeWeights - Get a flag that indicates whether or not edge weights are used.

   Logically Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  use_edge_weights - the flag indicating whether or not edge weights are used

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights(),
          MatPartitioningSetUseEdgeWeights()
@*/
PetscErrorCode  MatPartitioningGetUseEdgeWeights(MatPartitioning part,PetscBool *use_edge_weights)
{
  *use_edge_weights = part->use_edge_weights;
  return 0;
}

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  *newp = NULL;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return 0;
}

/*@C
   MatPartitioningViewFromOptions - Views a MatPartitioning object based on values in the options database

   Collective on MatPartitioning

   Input Parameters:
+  A - the partitioning context
.  obj - optional object that provides the options prefix
-  name - command line option

   Level: intermediate

.seealso: MatPartitioning, MatPartitioningView(), PetscObjectViewFromOptions(), MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningViewFromOptions(MatPartitioning A,PetscObject obj,const char name[])
{
  PetscObjectViewFromOptions((PetscObject)A,obj,name);
  return 0;
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool      iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return 0;
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context.
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
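
   For example, to select ParMETIS programmatically (a sketch; assumes PETSc was configured with ParMETIS):
.vb
   MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
.ve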

   Level: intermediate

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscBool      match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return 0;

  /* tear down the previous implementation before installing the new one */
  if (part->ops->destroy) {
    (*part->ops->destroy)(part);
    part->ops->destroy = NULL;
  }
  part->setupcalled = 0;
  part->data        = NULL;
  PetscMemzero(part->ops,sizeof(struct _MatPartitioningOps));

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return 0;
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context.

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
$  -mat_partitioning_nparts - number of subgraphs

   Notes:
    If the partitioner has not been set by the user it uses one of the installed partitioners such as ParMetis. If there are
   no installed partitioners it uses "current", which means no repartitioning is done.
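
   For example, to partition into 8 parts with ParMETIS at runtime (a sketch; "./myprog" is a stand-in for your application):
.vb
   ./myprog -mat_partitioning_type parmetis -mat_partitioning_nparts 8
.ve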

   Level: beginner

@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool      flag;
  char           type[256];
  const char     *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
    /* pick the best available partitioner as the default */
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }

  PetscOptionsInt("-mat_partitioning_nparts","number of fine parts",NULL,part->n,&part->n,&flag);

  PetscOptionsBool("-mat_partitioning_use_edge_weights","whether or not to use edge weights",NULL,part->use_edge_weights,&part->use_edge_weights,&flag);

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  return 0;
}