Actual source code: partition.c

petsc-3.9.4 2018-09-11

#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt       m;
  PetscMPIInt    rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  MatGetLocalSize(part->adj,&m,NULL);
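  /* each of the m locally owned rows stays on this process: a stride IS with
     first entry rank and stride 0 yields m copies of rank */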
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}

/*
   Partition the rows into nearly equal-sized contiguous blocks to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
{
  PetscInt       m,M,nparts,*indices,r,d,*parts,i,start,end,loc;

  MatGetSize(part->adj,&M,NULL);
  MatGetLocalSize(part->adj,&m,NULL);
  nparts = part->n;
  PetscCalloc1(nparts,&parts);
  d = M/nparts;
  for (i=0; i<nparts; i++) parts[i] = d;
  r = M%nparts;
  for (i=0; i<r; i++) parts[i] += 1;
  for (i=1; i<nparts; i++) parts[i] += parts[i-1];
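  /* parts[] now holds the cumulative (exclusive) upper bound of each block;
     e.g. M=10, nparts=3 gives parts = {4,7,10}: rows 0-3 go to part 0,
     rows 4-6 to part 1, and rows 7-9 to part 2 */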
  PetscCalloc1(m,&indices);
  MatGetOwnershipRange(part->adj,&start,&end);
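  /* parts[k] is the first global row of part k+1, so an exact hit in the
     binary search sends the row to the next part, while a miss encodes the
     insertion point as -(loc+1), which is the owning part */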
  for (i=start; i<end; i++) {
    PetscFindInt(i,nparts,parts,&loc);
    if (loc<0) loc = -(loc+1);
    else loc = loc+1;
    indices[i-start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
  return(0);
}

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n    = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1(rend-rstart,&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
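  /* view the n x n grid of cells as a p x p grid of square blocks: cell
     (row,col) = (cell/n,cell%n) belongs to block (row/(n/p),col/(n/p)),
     with blocks numbered row-major */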
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}


/* ===========================================================================================*/

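/* list of partitioning constructors registered with MatPartitioningRegister() */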
PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;


/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning type (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return(0);
}

/*@C
   MatPartitioningGetType - Gets the partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}

/*@C
   MatPartitioningSetNParts - Sets how many partitions are to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this tells the processor
                  number to which that node is assigned.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().

.keywords: matrix, get, partitioning

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscBool      flag = PETSC_FALSE;

  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  PetscOptionsGetBool(((PetscObject)matp)->options,((PetscObject)matp)->prefix,"-mat_partitioning_view",&flag,NULL);
  if (flag) {
    PetscViewer viewer;
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
    MatPartitioningView(matp,viewer);
    ISView(*partitioning,viewer);
  }
  return(0);
}
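/*
   A minimal usage sketch (illustration only, not part of the original source),
   assuming A is an assembled MatMPIAdj adjacency matrix:

     MatPartitioning part;
     IS              is;
     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,A);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/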

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
      partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return(0);
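  /* earlier references only decrement the reference count and null the
     caller's pointer; the final reference actually destroys the object */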
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, vertex, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscFree(part->vertex_weights);

  part->vertex_weights = (PetscInt*)weights;
  return(0);
}

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, partition, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscFree(part->part_weights);

  part->part_weights = (PetscReal*)weights;
  return(0);
}
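/*
   A small sketch (illustration only, not part of the original source): request
   a 1/3 vs 2/3 split across two parts. The array must come from PetscMalloc1()
   because PETSc takes ownership and frees it:

     PetscReal *w;
     PetscMalloc1(2,&w);
     w[0] = 1.0/3.0; w[1] = 2.0/3.0;
     MatPartitioningSetNParts(part,2);
     MatPartitioningSetPartitionWeights(part,w);
*/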

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;

  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool      iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode (*r)(MatPartitioning);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);

  if (part->ops->destroy) {
    (*part->ops->destroy)(part);

    part->ops->destroy = NULL;
    part->data         = 0;
    part->setupcalled  = 0;
  }

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;

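  /* the registered constructor installs the type-specific ops (apply, view, destroy) */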
  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context.

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Notes: If the partitioner has not been set by the user it uses one of the installed
   partitioners such as ParMetis. If there are no installed partitioners it uses
   MATPARTITIONINGCURRENT, which means no repartitioning.

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool      flag;
  char           type[256];
  const char     *def;

  PetscObjectOptionsBegin((PetscObject)part);
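  /* if no type has been set, default to the best available installed partitioner */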
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(PetscOptionsObject,part);
  }
  PetscOptionsEnd();
  return(0);
}