Actual source code: partition.c

petsc-3.5.4 2015-05-23
#include <petsc-private/matimpl.h>               /*I "petscmat.h" I*/

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscInt       m;
  PetscMPIInt    rank,size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
    SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);

  MatGetLocalSize(part->adj,&m,NULL);
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}
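
/*
   Note added for clarity (not in the original source): ISCreateStride() with a
   stride of 0 produces an index set containing the value rank repeated m times,
   so every local row of the adjacency matrix is assigned to the process that
   already owns it, which is exactly the "keep the current partitioning"
   behaviour described above.
*/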

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
  if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");

  MatGetSize(part->adj,&N,NULL);
  n    = (PetscInt)PetscSqrtReal((PetscReal)N);
  if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
  if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  MatGetOwnershipRange(part->adj,&rstart,&rend);
  PetscMalloc1((rend-rstart),&color);
  /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
  return(0);
}
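
/*
   Worked example added for clarity (not in the original source): for a 4x4 grid
   (n = 4) partitioned over p*p = 4 processes (p = 2), n/p = 2 and the formula
   above gives color = (col/2) + 2*(row/2), where col = cell%n and row = cell/n.
   Cells 0,1,4,5 therefore go to domain 0, cells 2,3,6,7 to domain 1, cells
   8,9,12,13 to domain 2, and cells 10,11,14,15 to domain 3, i.e. each domain
   owns one (n/p) x (n/p) square block of the grid.
*/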

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}


/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = 0;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;


/*@C
   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

   Not Collective

   Input Parameters:
+  sname - name of the partitioning scheme (for example MATPARTITIONINGCURRENT or "parmetis")
-  function - function pointer that creates the partitioning type

   Level: developer

   Sample usage:
.vb
   MatPartitioningRegister("my_part",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
@*/
PetscErrorCode  MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
{

  PetscFunctionListAdd(&MatPartitioningList,sname,function);
  return(0);
}
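
/*
   Minimal sketch, added for illustration only (not part of the original source),
   of a user-defined partitioner that could be registered with
   MatPartitioningRegister("my_part",MyPartCreate). The names MyPartApply and
   MyPartCreate are hypothetical; the pattern mirrors MatPartitioningCreate_Current()
   above: an apply routine that produces one domain index per local row, and a
   creation routine that installs it in the ops table.
*/
static PetscErrorCode MyPartApply(MatPartitioning part,IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank;

  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
  MatGetLocalSize(part->adj,&m,NULL);
  /* assign every local row to this process; a real partitioner would compute something smarter */
  ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
  return(0);
}

static PetscErrorCode MyPartCreate(MatPartitioning part)
{
  part->ops->apply   = MyPartApply;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  return(0);
}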

/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
        from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return(0);
}

/*@C
   MatPartitioningSetNParts - Set how many partitions need to be created;
        by default this is one per processor. Certain partitioning schemes may
        in fact only support that option.

   Not Collective

   Input Parameters:
+  part - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  part->n = n;
  return(0);
}

/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this gives the number of
                  the processor to which the node is assigned.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$    -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegister().

.keywords: matrix, get, partitioning

.seealso:  MatPartitioningRegister(), MatPartitioningCreate(),
           MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
           ISPartitioningCount()
@*/
PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscBool      flag = PETSC_FALSE;

  if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
  (*matp->ops->apply)(matp,partitioning);
  PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

  PetscOptionsGetBool(NULL,"-mat_partitioning_view",&flag,NULL);
  if (flag) {
    PetscViewer viewer;
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
    MatPartitioningView(matp,viewer);
    ISView(*partitioning,viewer);
  }
  return(0);
}
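
/*
   Typical calling sequence, added for illustration only (not part of the original
   source). ExamplePartitionMatrix and its arguments are hypothetical; it assumes
   an already assembled matrix (or a MatCreateMPIAdj() adjacency graph) A.
*/
static PetscErrorCode ExamplePartitionMatrix(Mat A,IS *is)
{
  MatPartitioning part;

  MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetFromOptions(part);    /* honours -mat_partitioning_type <type> */
  MatPartitioningApply(part,is);          /* *is holds the target domain of each local row */
  MatPartitioningDestroy(&part);
  return(0);
}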

/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
      partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  part->adj = adj;
  return(0);
}

/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on Partitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode  MatPartitioningDestroy(MatPartitioning *part)
{

  if (!*part) return(0);
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}

  if ((*part)->ops->destroy) {
    (*(*part)->ops->destroy)((*part));
  }
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return(0);
}

/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights, on each process this array must have the same size as the number of local rows

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, vertex weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{

  PetscFree(part->vertex_weights);

  part->vertex_weights = (PetscInt*)weights;
  return(0);
}
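
/*
   Illustration only (not part of the original source): as noted above, the weights
   array must come from PetscMalloc(), not malloc(), because PETSc takes ownership
   and frees it. Here nlocal is assumed to be the number of local rows of the
   adjacency matrix; the helper name is hypothetical.
*/
static PetscErrorCode ExampleSetUnitVertexWeights(MatPartitioning part,PetscInt nlocal)
{
  PetscInt i,*wts;

  PetscMalloc1(nlocal,&wts);
  for (i=0; i<nlocal; i++) wts[i] = 1;          /* equal weight for every vertex */
  MatPartitioningSetVertexWeights(part,wts);    /* part now owns wts */
  return(0);
}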

/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Logically Collective on Partitioning

   Input Parameters:
+  part - the partitioning context
-  weights - An array of size nparts that is used to specify the fraction of
             vertex weight that should be distributed to each sub-domain for
             the balance constraint. If all of the sub-domains are to be of
             the same size, then each of the nparts elements should be set
             to a value of 1/nparts. Note that the sum of all of the weights
             should be one.

   Level: beginner

   Notes:
      The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, partition weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{

  PetscFree(part->part_weights);

  part->part_weights = (PetscReal*)weights;
  return(0);
}
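
/*
   Illustration only (not part of the original source): an uneven two-way split in
   which the first sub-domain receives a quarter of the total vertex weight and the
   second three quarters; the entries must sum to one. The helper name is hypothetical.
*/
static PetscErrorCode ExampleSetUnevenPartitionWeights(MatPartitioning part)
{
  PetscReal *pwts;

  PetscMalloc1(2,&pwts);
  pwts[0] = 0.25;
  pwts[1] = 0.75;
  MatPartitioningSetNParts(part,2);
  MatPartitioningSetPartitionWeights(part,pwts);  /* part now owns pwts */
  return(0);
}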

/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscErrorCode  ierr;
  PetscMPIInt     size;

  *newp = 0;

  MatInitializePackage();
  PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
  part->vertex_weights = NULL;
  part->part_weights   = NULL;

  MPI_Comm_size(comm,&size);
  part->n = (PetscInt)size;

  *newp = part;
  return(0);
}

/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscBool      iascii;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
  }

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
    if (part->vertex_weights) {
      PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
    }
  }
  if (part->ops->view) {
    PetscViewerASCIIPushTab(viewer);
    (*part->ops->view)(part,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  return(0);
}

/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode  MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode ierr,(*r)(MatPartitioning);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)part,type,&match);
  if (match) return(0);

  if (part->setupcalled) {
    (*part->ops->destroy)(part);

    part->ops->destroy = NULL;
    part->data         = 0;
    part->setupcalled  = 0;
  }

  PetscFunctionListFind(MatPartitioningList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type,&((PetscObject)part)->type_name);
  return(0);
}

/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
        options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool      flag;
  char           type[256];
  const char     *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    MatPartitioningSetType(part,def);
  }

  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(part);
  }
  PetscOptionsEnd();
  return(0);
}