Actual source code: hierarchical.c

petsc-3.14.6 2021-03-30

#include <../src/mat/impls/adj/mpi/mpiadj.h>
#include <petscsf.h>
#include <petsc/private/matimpl.h>

/*
  This is a hierarchical partitioning. The partitioner has two goals:
  (1) Most current partitioners fail at large scale. The hierarchical partitioning
  strategy is designed to produce a large number of subdomains when the number of
  processor cores is large.
  (2) PCGASM needs one 'big' subdomain across multiple cores. The partitioner provides
  two consistent partitions, coarse parts and fine parts. A coarse part is a 'big'
  subdomain consisting of several small subdomains.
*/
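/*
  A minimal usage sketch of this partitioner from user code (illustrative only; A, nnodes,
  and ncores_per_node are placeholders supplied by the caller, and error checking is
  omitted in the style of this listing):

    MatPartitioning part;
    IS              is;

    MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
    MatPartitioningSetAdjacency(part,A);
    MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);
    MatPartitioningHierarchicalSetNcoarseparts(part,nnodes);
    MatPartitioningHierarchicalSetNfineparts(part,ncores_per_node);
    MatPartitioningApply(part,&is);
    ISDestroy(&is);
    MatPartitioningDestroy(&part);
*/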

PetscErrorCode MatPartitioningHierarchical_DetermineDestination(MatPartitioning,IS,PetscInt,PetscInt,IS*);
PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat,IS,IS,IS*,Mat*,ISLocalToGlobalMapping*);
PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat,IS,ISLocalToGlobalMapping,IS*);

typedef struct {
  char*                fineparttype;   /* partitioner on fine level */
  char*                coarseparttype; /* partitioner on coarse level */
  PetscInt             nfineparts;     /* number of fine parts on each coarse subdomain */
  PetscInt             ncoarseparts;   /* number of coarse parts */
  IS                   coarseparts;    /* partitioning on coarse level */
  IS                   fineparts;      /* partitioning on fine level */
  MatPartitioning      coarseMatPart;  /* MatPartitioning on coarse level (first level) */
  MatPartitioning      fineMatPart;    /* MatPartitioning on fine level (second level) */
  MatPartitioning      improver;       /* improves the quality of a partition */
} MatPartitioning_Hierarchical;

/*
   Uses a hierarchical partitioning strategy to partition the matrix in parallel.
   Use this interface to keep the partitioner consistent with the others.
*/
static PetscErrorCode MatPartitioningApply_Hierarchical(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Hierarchical *hpart  = (MatPartitioning_Hierarchical*)part->data;
  const PetscInt               *fineparts_indices, *coarseparts_indices;
  PetscInt                     *fineparts_indices_tmp;
  PetscInt                     *parts_indices,i,j,mat_localsize, *offsets;
  Mat                           mat    = part->adj,adj,sadj;
  PetscReal                    *part_weights;
  PetscBool                     flg;
  PetscInt                      bs     = 1;
  PetscInt                     *coarse_vertex_weights = NULL;
  PetscMPIInt                   size,rank;
  MPI_Comm                      comm,scomm;
  IS                            destination,fineparts_temp, vweights, svweights;
  PetscInt                      nsvweights,*fp_vweights;
  const PetscInt               *svweights_indices;
  ISLocalToGlobalMapping        mapping;
  const char                   *prefix;
  PetscBool                     use_edge_weights;
  PetscErrorCode                ierr;

  PetscObjectGetComm((PetscObject)part,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (flg) {
    adj = mat;
    PetscObjectReference((PetscObject)adj);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
    if (adj->rmap->n > 0) bs = mat->rmap->n/adj->rmap->n;
  }
  /* local size of mat */
  mat_localsize = adj->rmap->n;
  /* check parameters */
  /* how many small subdomains we want from a given 'big' subdomain */
  if (!hpart->nfineparts) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set the number of small subdomains for each big subdomain");
  if (!hpart->ncoarseparts && !part->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must set either the number of coarse parts or the total number of parts");

  /* Partitioning the domain into one single subdomain is a trivial case, and we should just return */
  if (part->n==1) {
    PetscCalloc1(bs*adj->rmap->n,&parts_indices);
    ISCreateGeneral(comm,bs*adj->rmap->n,parts_indices,PETSC_OWN_POINTER,partitioning);
    hpart->ncoarseparts = 1;
    hpart->nfineparts = 1;
    PetscStrallocpy("NONE",&hpart->coarseparttype);
    PetscStrallocpy("NONE",&hpart->fineparttype);
    MatDestroy(&adj);
    return(0);
  }

  if (part->n) {
    hpart->ncoarseparts = part->n/hpart->nfineparts;

    if (part->n%hpart->nfineparts != 0) hpart->ncoarseparts++;
  } else {
    part->n = hpart->ncoarseparts*hpart->nfineparts;
  }

  PetscMalloc1(hpart->ncoarseparts+1, &offsets);
  PetscMalloc1(hpart->ncoarseparts, &part_weights);

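  /* offsets[] becomes the prefix sum of fine parts over the coarse parts, so that offsets[k]
     is the global id of the first fine part inside coarse part k, and part_weights[] gives
     each coarse part a weight proportional to the number of fine parts it holds. For example,
     part->n = 10 with nfineparts = 4 yields ncoarseparts = 3 holding 2, 4, and 4 fine parts,
     offsets = {0,2,6,10}, and part_weights = {0.2,0.4,0.4}. */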
  offsets[0] = 0;
  if (part->n%hpart->nfineparts != 0) offsets[1] = part->n%hpart->nfineparts;
  else offsets[1] = hpart->nfineparts;

  part_weights[0] = ((PetscReal)offsets[1])/part->n;

  for (i=2; i<=hpart->ncoarseparts; i++) {
    offsets[i] = hpart->nfineparts;
    part_weights[i-1] = ((PetscReal)offsets[i])/part->n;
  }

  offsets[0] = 0;
  for (i=1; i<=hpart->ncoarseparts; i++)
    offsets[i] += offsets[i-1];

  /* If there already exists a mat partitioner, we should delete it */
  MatPartitioningDestroy(&hpart->coarseMatPart);
  MatPartitioningCreate(comm,&hpart->coarseMatPart);
  PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
  PetscObjectSetOptionsPrefix((PetscObject)hpart->coarseMatPart,prefix);
  PetscObjectAppendOptionsPrefix((PetscObject)hpart->coarseMatPart,"hierarch_coarse_");
  /* if the partitioning type was not set yet, use parmetis by default */
  if (!hpart->coarseparttype) {
#if defined(PETSC_HAVE_PARMETIS)
    MatPartitioningSetType(hpart->coarseMatPart,MATPARTITIONINGPARMETIS);
    PetscStrallocpy(MATPARTITIONINGPARMETIS,&hpart->coarseparttype);
#elif defined(PETSC_HAVE_PTSCOTCH)
    MatPartitioningSetType(hpart->coarseMatPart,MATPARTITIONINGPTSCOTCH);
    PetscStrallocpy(MATPARTITIONINGPTSCOTCH,&hpart->coarseparttype);
#else
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Requires PETSc to be installed with ParMETIS or run with -mat_partitioning_hierarchical_coarseparttype partitiontype");
#endif
  } else {
    MatPartitioningSetType(hpart->coarseMatPart,hpart->coarseparttype);
  }
  MatPartitioningSetAdjacency(hpart->coarseMatPart,adj);
  MatPartitioningSetNParts(hpart->coarseMatPart, hpart->ncoarseparts);
  /* copy over vertex weights */
  if (part->vertex_weights) {
    PetscMalloc1(mat_localsize,&coarse_vertex_weights);
    PetscArraycpy(coarse_vertex_weights,part->vertex_weights,mat_localsize);
    MatPartitioningSetVertexWeights(hpart->coarseMatPart,coarse_vertex_weights);
  }
  /* copy the use_edge_weights flag from part to the coarse partitioner */
  MatPartitioningGetUseEdgeWeights(part,&use_edge_weights);
  MatPartitioningSetUseEdgeWeights(hpart->coarseMatPart,use_edge_weights);

  MatPartitioningSetPartitionWeights(hpart->coarseMatPart, part_weights);
  MatPartitioningApply(hpart->coarseMatPart,&hpart->coarseparts);

  /* Wrap the original vertex weights into an index set so that we can extract the
     corresponding vertex weights for each big subdomain using ISCreateSubIS(). */
  if (part->vertex_weights) {
    ISCreateGeneral(comm,mat_localsize,part->vertex_weights,PETSC_COPY_VALUES,&vweights);
  }

  PetscCalloc1(mat_localsize, &fineparts_indices_tmp);
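  /* Process the coarse subdomains in rounds of 'size': in each round every rank is assigned at
     most one big subdomain, partitions it into fine parts on a subcommunicator, and the results
     are scattered back to the original row owners below */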
  for (i=0; i<hpart->ncoarseparts; i+=size) {
    /* Determine where we want to send big subdomains */
    MatPartitioningHierarchical_DetermineDestination(part,hpart->coarseparts,i,i+size,&destination);
    /* Assemble a submatrix and its vertex weights for partitioning subdomains */
    MatPartitioningHierarchical_AssembleSubdomain(adj,part->vertex_weights? vweights:NULL,destination,part->vertex_weights? &svweights:NULL,&sadj,&mapping);
    /* We have to create a new array to hold the vertex weights since the fine partitioner needs to own the vertex-weights array */
    if (part->vertex_weights) {
      ISGetLocalSize(svweights,&nsvweights);
      PetscMalloc1(nsvweights,&fp_vweights);
      ISGetIndices(svweights,&svweights_indices);
      PetscArraycpy(fp_vweights,svweights_indices,nsvweights);
      ISRestoreIndices(svweights,&svweights_indices);
      ISDestroy(&svweights);
    }

    ISDestroy(&destination);
    PetscObjectGetComm((PetscObject)sadj,&scomm);

    /*
     * If the number of big subdomains is smaller than the number of processor cores, the higher
     * ranks do not need to do any partitioning
     */
    if ((i+rank)<hpart->ncoarseparts) {
      MatPartitioningDestroy(&hpart->fineMatPart);
      /* create a fine partitioner */
      MatPartitioningCreate(scomm,&hpart->fineMatPart);
      PetscObjectSetOptionsPrefix((PetscObject)hpart->fineMatPart,prefix);
      PetscObjectAppendOptionsPrefix((PetscObject)hpart->fineMatPart,"hierarch_fine_");
      /* if the partitioning type was not set, use parmetis by default */
      if (!hpart->fineparttype) {
#if defined(PETSC_HAVE_PARMETIS)
        MatPartitioningSetType(hpart->fineMatPart,MATPARTITIONINGPARMETIS);
        PetscStrallocpy(MATPARTITIONINGPARMETIS,&hpart->fineparttype);
#elif defined(PETSC_HAVE_PTSCOTCH)
        MatPartitioningSetType(hpart->fineMatPart,MATPARTITIONINGPTSCOTCH);
        PetscStrallocpy(MATPARTITIONINGPTSCOTCH,&hpart->fineparttype);
#elif defined(PETSC_HAVE_CHACO)
        MatPartitioningSetType(hpart->fineMatPart,MATPARTITIONINGCHACO);
        PetscStrallocpy(MATPARTITIONINGCHACO,&hpart->fineparttype);
#elif defined(PETSC_HAVE_PARTY)
        MatPartitioningSetType(hpart->fineMatPart,MATPARTITIONINGPARTY);
        PetscStrallocpy(MATPARTITIONINGPARTY,&hpart->fineparttype);
#else
        SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Requires PETSc to be installed with ParMETIS or run with -mat_partitioning_hierarchical_fineparttype partitiontype");
#endif
      } else {
        MatPartitioningSetType(hpart->fineMatPart,hpart->fineparttype);
      }
      MatPartitioningSetUseEdgeWeights(hpart->fineMatPart,use_edge_weights);
      MatPartitioningSetAdjacency(hpart->fineMatPart,sadj);
      MatPartitioningSetNParts(hpart->fineMatPart, offsets[rank+1+i]-offsets[rank+i]);
      if (part->vertex_weights) {
        MatPartitioningSetVertexWeights(hpart->fineMatPart,fp_vweights);
      }
      MatPartitioningApply(hpart->fineMatPart,&fineparts_temp);
    } else {
      ISCreateGeneral(scomm,0,NULL,PETSC_OWN_POINTER,&fineparts_temp);
    }

    MatDestroy(&sadj);

    /* Send partition back to the original owners */
    MatPartitioningHierarchical_ReassembleFineparts(adj,fineparts_temp,mapping,&hpart->fineparts);
    ISGetIndices(hpart->fineparts,&fineparts_indices);
    for (j=0; j<mat_localsize; j++)
      if (fineparts_indices[j] >= 0) fineparts_indices_tmp[j] = fineparts_indices[j];

    ISRestoreIndices(hpart->fineparts,&fineparts_indices);
    ISDestroy(&hpart->fineparts);
    ISDestroy(&fineparts_temp);
    ISLocalToGlobalMappingDestroy(&mapping);
  }

  if (part->vertex_weights) {
    ISDestroy(&vweights);
  }

  ISCreateGeneral(comm,mat_localsize,fineparts_indices_tmp,PETSC_OWN_POINTER,&hpart->fineparts);
  ISGetIndices(hpart->fineparts,&fineparts_indices);
  ISGetIndices(hpart->coarseparts,&coarseparts_indices);
  PetscMalloc1(bs*adj->rmap->n,&parts_indices);
  /* Modify the local indices into global indices by combining the coarse partition and the fine partitions */
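  /* e.g., a vertex in coarse part c whose fine part within that subdomain is f receives the
     global part id offsets[c]+f */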
  for (i=0; i<adj->rmap->n; i++) {
    for (j=0; j<bs; j++) {
      parts_indices[bs*i+j] = fineparts_indices[i]+offsets[coarseparts_indices[i]];
    }
  }
  ISRestoreIndices(hpart->fineparts,&fineparts_indices);
  ISRestoreIndices(hpart->coarseparts,&coarseparts_indices);
  PetscFree(offsets);
  ISCreateGeneral(comm,bs*adj->rmap->n,parts_indices,PETSC_OWN_POINTER,partitioning);
  MatDestroy(&adj);
  return(0);
}


PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat adj, IS fineparts, ISLocalToGlobalMapping mapping, IS *sfineparts)
{
  PetscInt            *local_indices, *global_indices,*sfineparts_indices,localsize,i;
  const PetscInt      *ranges,*fineparts_indices;
  PetscMPIInt          rank,*owners;
  MPI_Comm             comm;
  PetscLayout          rmap;
  PetscSFNode         *remote;
  PetscSF              sf;
  PetscErrorCode       ierr;

  PetscObjectGetComm((PetscObject)adj,&comm);
  MPI_Comm_rank(comm,&rank);
  MatGetLayouts(adj,&rmap,NULL);
  ISGetLocalSize(fineparts,&localsize);
  PetscMalloc2(localsize,&global_indices,localsize,&local_indices);
  for (i=0; i<localsize; i++) {
    local_indices[i] = i;
  }
  /* map local indices back to global so that we can permute data globally */
  ISLocalToGlobalMappingApply(mapping,localsize,local_indices,global_indices);
  PetscCalloc1(localsize,&owners);
  /* find owners for the global indices */
  for (i=0; i<localsize; i++) {
    PetscLayoutFindOwner(rmap,global_indices[i],&owners[i]);
  }
  PetscLayoutGetRanges(rmap,&ranges);
  PetscMalloc1(ranges[rank+1]-ranges[rank],&sfineparts_indices);

  for (i=0; i<ranges[rank+1]-ranges[rank]; i++) {
    sfineparts_indices[i] = -1;
  }

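  /* Build a star forest whose leaves are this rank's entries of 'fineparts' and whose roots
     are the owning ranks' local rows; the reduce below pushes each fine-part id back to its
     owner, overwriting the -1 placeholders via MPIU_REPLACE */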
  ISGetIndices(fineparts,&fineparts_indices);
  PetscSFCreate(comm,&sf);
  PetscMalloc1(localsize,&remote);
  for (i=0; i<localsize; i++) {
    remote[i].rank  = owners[i];
    remote[i].index = global_indices[i]-ranges[owners[i]];
  }
  PetscSFSetType(sf,PETSCSFBASIC);
  /* not sure how to add prefix to sf */
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf,localsize,localsize,NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
  PetscSFReduceBegin(sf,MPIU_INT,fineparts_indices,sfineparts_indices,MPIU_REPLACE);
  PetscSFReduceEnd(sf,MPIU_INT,fineparts_indices,sfineparts_indices,MPIU_REPLACE);
  PetscSFDestroy(&sf);
  ISRestoreIndices(fineparts,&fineparts_indices);
  ISCreateGeneral(comm,ranges[rank+1]-ranges[rank],sfineparts_indices,PETSC_OWN_POINTER,sfineparts);
  PetscFree2(global_indices,local_indices);
  PetscFree(owners);
  return(0);
}


PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat adj,IS vweights, IS destination,IS *svweights,Mat *sadj,ISLocalToGlobalMapping *mapping)
{
  IS              irows,icols;
  PetscInt        irows_ln;
  PetscMPIInt     rank;
  const PetscInt *irows_indices;
  MPI_Comm        comm;
  PetscErrorCode  ierr;

  PetscObjectGetComm((PetscObject)adj,&comm);
  MPI_Comm_rank(comm,&rank);
  /* figure out where data comes from */
  ISBuildTwoSided(destination,NULL,&irows);
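  /* irows now holds, on each rank, the global row indices of all vertices that 'destination'
     assigned to this rank; using the same index set for the columns extracts a square subgraph */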
  ISDuplicate(irows,&icols);
  ISGetLocalSize(irows,&irows_ln);
  ISGetIndices(irows,&irows_indices);
  ISLocalToGlobalMappingCreate(comm,1,irows_ln,irows_indices,PETSC_COPY_VALUES,mapping);
  ISRestoreIndices(irows,&irows_indices);
  MatCreateSubMatrices(adj,1,&irows,&icols,MAT_INITIAL_MATRIX,&sadj);
  if (vweights && svweights) {
    ISCreateSubIS(vweights,irows,svweights);
  }
  ISDestroy(&irows);
  ISDestroy(&icols);
  return(0);
}


PetscErrorCode MatPartitioningHierarchical_DetermineDestination(MatPartitioning part, IS partitioning, PetscInt pstart, PetscInt pend, IS *destination)
{
  MPI_Comm            comm;
  PetscMPIInt         rank,size,target;
  PetscInt            plocalsize,*dest_indices,i;
  const PetscInt     *part_indices;
  PetscErrorCode      ierr;

  PetscObjectGetComm((PetscObject)part,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);
  if ((pend-pstart)>size) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"range [%D, %D) should be smaller than or equal to size %D",pstart,pend,size);
  if (pstart>pend) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"pstart %D should not be larger than pend %D",pstart,pend);
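  /* Coarse parts pstart..pend-1 are assigned to ranks 0..pend-pstart-1 in order; vertices whose
     coarse part falls outside this window are marked -1 and skipped in this round */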
  ISGetLocalSize(partitioning,&plocalsize);
  PetscMalloc1(plocalsize,&dest_indices);
  ISGetIndices(partitioning,&part_indices);
  for (i=0; i<plocalsize; i++) {
    /* compute target */
    target = part_indices[i]-pstart;
    /* mark out of range entity as -1 */
    if (part_indices[i]<pstart || part_indices[i]>=pend) target = -1;
    dest_indices[i] = target;
  }
  ISRestoreIndices(partitioning,&part_indices);
  ISCreateGeneral(comm,plocalsize,dest_indices,PETSC_OWN_POINTER,destination);
  return(0);
}


PetscErrorCode MatPartitioningView_Hierarchical(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;
  PetscMPIInt                   rank;
  PetscBool                     iascii;
  PetscViewer                   sviewer;

  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer," Number of coarse parts: %D\n",hpart->ncoarseparts);
    PetscViewerASCIIPrintf(viewer," Coarse partitioner: %s\n",hpart->coarseparttype);
    if (hpart->coarseMatPart) {
      PetscViewerASCIIPushTab(viewer);
      MatPartitioningView(hpart->coarseMatPart,viewer);
      PetscViewerASCIIPopTab(viewer);
    }
    PetscViewerASCIIPrintf(viewer," Number of fine parts: %D\n",hpart->nfineparts);
    PetscViewerASCIIPrintf(viewer," Fine partitioner: %s\n",hpart->fineparttype);
    PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
    if (!rank && hpart->fineMatPart) {
      PetscViewerASCIIPushTab(viewer);
      MatPartitioningView(hpart->fineMatPart,sviewer);
      PetscViewerASCIIPopTab(viewer);
    }
    PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
  }
  return(0);
}


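/* The two getters below return the stored IS with an extra reference taken; the caller is
   responsible for destroying the returned IS */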
PetscErrorCode MatPartitioningHierarchicalGetFineparts(MatPartitioning part,IS *fineparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;

  *fineparts = hpart->fineparts;
  PetscObjectReference((PetscObject)hpart->fineparts);
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalGetCoarseparts(MatPartitioning part,IS *coarseparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;

  *coarseparts = hpart->coarseparts;
  PetscObjectReference((PetscObject)hpart->coarseparts);
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalSetNcoarseparts(MatPartitioning part, PetscInt ncoarseparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;

  hpart->ncoarseparts = ncoarseparts;
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalSetNfineparts(MatPartitioning part, PetscInt nfineparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;

  hpart->nfineparts = nfineparts;
  return(0);
}

PetscErrorCode MatPartitioningSetFromOptions_Hierarchical(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  char                          value[1024];
  PetscBool                     flag = PETSC_FALSE;

  PetscOptionsHead(PetscOptionsObject,"Set hierarchical partitioning options");
  PetscOptionsString("-mat_partitioning_hierarchical_coarseparttype","coarse part type",NULL,NULL,value,sizeof(value),&flag);
  if (flag) {
    PetscStrallocpy(value,&hpart->coarseparttype);
  }
  PetscOptionsString("-mat_partitioning_hierarchical_fineparttype","fine part type",NULL,NULL,value,sizeof(value),&flag);
  if (flag) {
    PetscStrallocpy(value,&hpart->fineparttype);
  }
  PetscOptionsInt("-mat_partitioning_hierarchical_ncoarseparts","number of coarse parts",NULL,hpart->ncoarseparts,&hpart->ncoarseparts,&flag);
  PetscOptionsInt("-mat_partitioning_hierarchical_nfineparts","number of fine parts",NULL,hpart->nfineparts,&hpart->nfineparts,&flag);
  PetscOptionsTail();
  return(0);
}


PetscErrorCode MatPartitioningDestroy_Hierarchical(MatPartitioning part)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;

  if (hpart->coarseparttype) {PetscFree(hpart->coarseparttype);}
  if (hpart->fineparttype) {PetscFree(hpart->fineparttype);}
  ISDestroy(&hpart->fineparts);
  ISDestroy(&hpart->coarseparts);
  MatPartitioningDestroy(&hpart->coarseMatPart);
  MatPartitioningDestroy(&hpart->fineMatPart);
  MatPartitioningDestroy(&hpart->improver);
  PetscFree(hpart);
  return(0);
}

/*
   Improves the quality of a partition
*/
static PetscErrorCode MatPartitioningImprove_Hierarchical(MatPartitioning part, IS *partitioning)
{
  PetscErrorCode                ierr;
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  Mat                           mat = part->adj, adj;
  PetscBool                     flg;
  const char                   *prefix;
#if defined(PETSC_HAVE_PARMETIS)
  PetscInt                     *vertex_weights;
#endif

  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (flg) {
    adj = mat;
    PetscObjectReference((PetscObject)adj);
  } else {
    /* convert the matrix to MATMPIADJ if it is not already in that format */
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
  }

  /* If there already exists a mat partitioner, we should delete it */
  MatPartitioningDestroy(&hpart->improver);
  MatPartitioningCreate(PetscObjectComm((PetscObject)part),&hpart->improver);
  PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
  PetscObjectSetOptionsPrefix((PetscObject)hpart->improver,prefix);
  PetscObjectAppendOptionsPrefix((PetscObject)hpart->improver,"hierarch_improver_");
  /* Only ParMETIS supports refining a partition */
#if defined(PETSC_HAVE_PARMETIS)
  MatPartitioningSetType(hpart->improver,MATPARTITIONINGPARMETIS);
  MatPartitioningSetAdjacency(hpart->improver,adj);
  MatPartitioningSetNParts(hpart->improver, part->n);
  /* copy over vertex weights */
  if (part->vertex_weights) {
    PetscMalloc1(adj->rmap->n,&vertex_weights);
    PetscArraycpy(vertex_weights,part->vertex_weights,adj->rmap->n);
    MatPartitioningSetVertexWeights(hpart->improver,vertex_weights);
  }
  MatPartitioningImprove(hpart->improver,partitioning);
  MatDestroy(&adj);
  return(0);
#else
  SETERRQ(PetscObjectComm((PetscObject)adj),PETSC_ERR_SUP,"Requires PETSc to be installed with ParMETIS");
#endif
}


/*MC
   MATPARTITIONINGHIERARCH - Creates a partitioning context via the hierarchical partitioning strategy.
   The graph is partitioned into a number of subgraphs, and each subgraph is further split into a few smaller
   subgraphs. The idea can be applied in a recursive manner. It is useful when you want to partition the graph
   into a large number of subgraphs (often more than 10K), since partitions obtained with existing partitioners
   such as ParMETIS and PTScotch are far from ideal. The hierarchical partitioning also tries to avoid off-node
   communication as much as possible on multi-core processors. Another use case for the hierarchical partitioning
   is to improve PCGASM convergence by generating multi-rank connected subdomains.

   Collective

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+     -mat_partitioning_hierarchical_coarseparttype - partitioner type at the first level; parmetis is used by default
.     -mat_partitioning_hierarchical_fineparttype - partitioner type at the second level; parmetis is used by default
.     -mat_partitioning_hierarchical_ncoarseparts - number of subgraphs requested at the first level, which is often the number of compute nodes
-     -mat_partitioning_hierarchical_nfineparts - number of smaller subgraphs for each subgraph, which is often the number of cores per compute node

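   Example:
   A sketch of the options for a run on two compute nodes with four cores each (the numbers are
   illustrative; adjust them to the machine):
.vb
   -mat_partitioning_type hierarch -mat_partitioning_hierarchical_ncoarseparts 2 -mat_partitioning_hierarchical_nfineparts 4
.ve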
   Level: beginner

   References:
  1.  Fande Kong, Xiao-Chuan Cai, A highly scalable multilevel Schwarz method with boundary geometry preserving coarse spaces for 3D elasticity
      problems on domains with complex geometry, SIAM Journal on Scientific Computing 38 (2), C73-C95, 2016
  2.  Fande Kong, Roy H. Stogner, Derek Gaston, John W. Peterson, Cody J. Permann, Andrew E. Slaughter, and Richard C. Martineau,
      A general-purpose hierarchical mesh partitioning method with node balancing strategies for large-scale numerical simulations,
      arXiv preprint arXiv:1809.02666, 2018.

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Hierarchical(MatPartitioning part)
{
  PetscErrorCode                ierr;
  MatPartitioning_Hierarchical *hpart;

  PetscNewLog(part,&hpart);
  part->data = (void*)hpart;

  hpart->fineparttype       = NULL; /* fine level (second) partitioner */
  hpart->coarseparttype     = NULL; /* coarse level (first) partitioner */
  hpart->nfineparts         = 1;    /* we do not further partition the coarse partition by default */
  hpart->ncoarseparts       = 0;    /* number of coarse parts (first level) */
  hpart->coarseparts        = NULL;
  hpart->fineparts          = NULL;
  hpart->coarseMatPart      = NULL;
  hpart->fineMatPart        = NULL;

  part->ops->apply          = MatPartitioningApply_Hierarchical;
  part->ops->view           = MatPartitioningView_Hierarchical;
  part->ops->destroy        = MatPartitioningDestroy_Hierarchical;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Hierarchical;
  part->ops->improve        = MatPartitioningImprove_Hierarchical;
  return(0);
}