Actual source code: partition.c

petsc-3.10.0 2018-09-12

#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning: keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank,size;

  PetscFunctionBegin;
  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  /* every local row stays where it is: a stride IS filled with this process's rank */
  MatGetLocalSize(part->adj,&m,NULL);
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  PetscFunctionReturn(0);
}

/*
   Partitions the rows into nparts nearly equal, contiguous blocks to rebalance
   the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  PetscFunctionBegin;
  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscCalloc1(nparts,&parts);
  d = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;                /* the first r parts get one extra row */
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];  /* prefix sums: parts[i] is the first row of part i+1 */
  PetscCalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
  for (i=start; i<end; i++) {
    PetscFindInt(i,nparts,parts,&loc);              /* binary search of row i in the prefix sums */
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  PetscFunctionReturn(0);
}
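
/* Hand-traced example of the average partitioner above: M = 10 rows and
   nparts = 3 give chunk sizes {4,3,3} and prefix sums parts = {4,7,10}, so
   rows 0-3 land in partition 0, rows 4-6 in partition 1, and rows 7-9 in
   partition 2. */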

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt    cell,n,N,p,rstart,rend,*color;
  PetscMPIInt size;

  PetscFunctionBegin;
  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n    = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  for (cell=rstart; cell<rend; cell++) {
    /* (cell%n)/(n/p) and (cell/n)/(n/p) are the x and y block coordinates of the cell */
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  PetscFunctionReturn(0);
}
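
/* Hand-traced example of the square partitioner above: 4 processes (p = 2)
   on a 4 x 4 grid (n = 4) color the nodes in 2 x 2 blocks:
        0 0 1 1
        0 0 1 1
        2 2 3 3
        2 2 3 3
*/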

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  PetscFunctionReturn(0);
}


/* Gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :         2*p) : the start and end node of each subdomain
       seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
      level[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level, and so on)
   The seps and level arrays must be allocated large enough by the caller.
*/
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt i,pTree,pStartTree;

  PetscFunctionBegin;
  if (p & (p-1)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%D is not a power of 2",p); /* p must be a power of 2 */
  if (!p) PetscFunctionReturn(0);
  PetscMemzero(seps,(2*p-2)*sizeof(PetscInt));
  PetscMemzero(level,(p-1)*sizeof(PetscInt));
  seps[2*p-2] = sizes[2*p-2];
  pTree = p;
  pStartTree = 0;
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree/2;
  }
  seps[2*p-2] -= sizes[2*p-2];

  pStartTree = 2*p-2;
  pTree      = 1;
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2*i - (pStartTree + 2*pTree);
      PetscInt n = seps[k+1];

      seps[k+1]  = seps[i]   - sizes[k+1];
      seps[k]    = seps[k+1] + sizes[k+1] - n - sizes[k];
      level[i-p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);
  for (i=2*p-2;i>=0;i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + sizes[i] - 1; }
  PetscFunctionReturn(0);
}
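
/* Hand-traced example: with p = 2 and sizes = {4, 5, 2} (two subdomains of 4
   and 5 nodes below a 2-node root separator) the routine returns
      seps  = {0, 3, 4, 8, 9, 10}   -> subdomains [0,3] and [4,8], separator [9,10]
      level = {-1}                  -> the lone separator is the root
*/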

/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning type (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve
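
   A minimal sketch of what MyPartCreate might look like (illustrative only; MyPartCreate
   and MyPartApply are hypothetical names, not PETSc routines); it assigns every local row
   to partition 0:
.vb
   static PetscErrorCode MyPartApply(MatPartitioning part,IS *partitioning)
   {
     PetscInt m;

     PetscFunctionBegin;
     MatGetLocalSize(part->adj,&m,NULL);
     ISCreateStride(PetscObjectComm((PetscObject)part),m,0,0,partitioning);
     PetscFunctionReturn(0);
   }

   PETSC_EXTERN PetscErrorCode MyPartCreate(MatPartitioning part)
   {
     PetscFunctionBegin;
     part->ops->apply = MyPartApply;
     PetscFunctionReturn(0);
   }
.ve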

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  PetscFunctionBegin;
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetNParts - Sets how many partitions are to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node, a non-negative value indicates the processor
                  number the node has been assigned to; a negative value x indicates that the node lies
                  on a separator, at level -(x+1).

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().

.keywords: matrix, get, partitioning

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
{
  PetscFunctionBegin;
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->applynd) SETERRQ1(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
  PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);
  (*matp->ops->applynd)(matp,partitioning);
  PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this tells the processor
                  number that the node is assigned to.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().
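
   Sample usage (a minimal sketch; it assumes a parallel matrix A has already been
   assembled, and omits error checking):
.vb
   MatPartitioning part;
   IS              is;

   MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
   MatPartitioningSetAdjacency(part,A);
   MatPartitioningSetFromOptions(part);
   MatPartitioningApply(part,&is);
   ISDestroy(&is);
   MatPartitioningDestroy(&part);
.ve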

.keywords: matrix, get, partitioning

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscFunctionBegin;
  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
  ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
      partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  PetscFunctionBegin;
  part->adj = adj;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights; on each process this array must have the same length as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc, so the user should not free it themselves. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
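
   Sample usage (a sketch, assuming the adjacency matrix adj has already been set;
   here every vertex gets weight 1):
.vb
   PetscInt *weights,i,m;

   MatGetLocalSize(adj,&m,NULL);
   PetscMalloc1(m,&weights);
   for (i=0; i<m; i++) weights[i] = 1;
   MatPartitioningSetVertexWeights(part,weights);  /* PETSc now owns the array */
.ve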

.keywords: Partitioning, vertex weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscFree(part->vertex_weights);

  part->vertex_weights = (PetscInt*)weights;
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - an array of size nparts that specifies the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc, so the user should not free it themselves. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().
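
   Sample usage (a sketch for two sub-domains, sending two thirds of the vertex
   weight to sub-domain 0):
.vb
   PetscReal *weights;

   PetscMalloc1(2,&weights);
   weights[0] = 2.0/3.0;
   weights[1] = 1.0/3.0;   /* the weights must sum to one */
   MatPartitioningSetPartitionWeights(part,weights);  /* PETSc now owns the array */
.ve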

.keywords: Partitioning, partition weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscFree(part->part_weights);

  part->part_weights = (PetscReal*)weights;
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;

  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  PetscFunctionReturn(0);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)
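
   Sample usage:
.vb
   MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
.ve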

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode (*r)(MatPartitioning);
  PetscBool      match;

  PetscFunctionBegin;
  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) PetscFunctionReturn(0);

  if (part->ops->destroy) {
    (*part->ops->destroy)(part);

    part->ops->destroy = NULL;
    part->data         = 0;
    part->setupcalled  = 0;
  }

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  PetscFunctionReturn(0);
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Notes:
    If the type has not been set by the user, one of the installed partitioners (such as ParMETIS)
   is chosen by default. If no external partitioners are installed, the current type is used,
   which means no repartitioning.
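
   For example, an application could be run with (an illustrative command line; "myapp" is a placeholder)
$     ./myapp -mat_partitioning_type parmetis -mat_partitioning_view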

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool  flag;
  char       type[256];
  const char *def;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}