Actual source code: hierarchical.c
petsc-3.9.4 2018-09-11
#include <../src/mat/impls/adj/mpi/mpiadj.h>
#include <petscsf.h>
#include <petsc/private/matimpl.h>

/*
  This is a hierarchical partitioner. It has two goals:
  (1) Most current partitioners fail at large scale. The hierarchical partitioning
  strategy is designed to produce a large number of subdomains when the number of
  processor cores is large.
  (2) PCGASM needs one 'big' subdomain spanning multiple cores. The partitioner provides
  two consistent partitions, coarse parts and fine parts. A coarse part is a 'big'
  subdomain consisting of several small subdomains.
*/
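
/*
  A minimal usage sketch (illustration only, not part of this source file). It assumes the
  type name MATPARTITIONINGHIERARCHPART given in the manual page block at the end of this
  file, and that comm, adj (a parallel adjacency matrix) and ierr are set up by the caller:

    MatPartitioning part;
    IS              is;

    ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
    ierr = MatPartitioningSetType(part,MATPARTITIONINGHIERARCHPART);CHKERRQ(ierr);
    ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
    ierr = MatPartitioningHierarchicalSetNcoarseparts(part,2);CHKERRQ(ierr);
    ierr = MatPartitioningHierarchicalSetNfineparts(part,4);CHKERRQ(ierr);
    ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);    .. 2 coarse x 4 fine = 8 parts ..
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

  The resulting IS gives one global part number per row; the coarse and fine levels can be
  retrieved separately with MatPartitioningHierarchicalGetCoarseparts() and
  MatPartitioningHierarchicalGetFineparts().
*/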

PetscErrorCode MatPartitioningHierarchical_DetermineDestination(MatPartitioning part, IS partitioning, PetscInt pstart, PetscInt pend, IS *destination);
PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat adj,IS destination,Mat *sadj, ISLocalToGlobalMapping *mapping);
PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat adj, IS fineparts, ISLocalToGlobalMapping mapping, IS *sfineparts);

typedef struct {
  char     *fineparttype;   /* partitioner on fine level */
  char     *coarseparttype; /* partitioner on coarse level */
  PetscInt  Nfineparts;     /* number of fine parts on each coarse subdomain */
  PetscInt  Ncoarseparts;   /* number of coarse parts */
  IS        coarseparts;    /* partitioning on coarse level */
  IS        fineparts;      /* partitioning on fine level */
} MatPartitioning_Hierarchical;

/*
  Apply a hierarchical partitioning strategy to partition the matrix in parallel.
  This interface keeps the partitioner consistent with the other partitioner types.
*/
static PetscErrorCode MatPartitioningApply_Hierarchical(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  const PetscInt               *fineparts_indices, *coarseparts_indices;
  PetscInt                     *parts_indices,i,j,mat_localsize;
  Mat                           mat = part->adj,adj,sadj;
  PetscBool                     flg;
  PetscInt                      bs = 1;
  MatPartitioning               finePart, coarsePart;
  PetscInt                     *coarse_vertex_weights = 0;
  PetscMPIInt                   size,rank;
  MPI_Comm                      comm,scomm;
  IS                            destination,fineparts_temp;
  ISLocalToGlobalMapping        mapping;
  PetscErrorCode                ierr;

  PetscObjectGetComm((PetscObject)part,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (flg) {
    adj = mat;
    PetscObjectReference((PetscObject)adj);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
    if (adj->rmap->n > 0) bs = mat->rmap->n/adj->rmap->n;
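    /* for example, a MATMPIBAIJ with block size 3 is expected to convert to an adjacency
       graph with one vertex per block row, giving bs = 3, so each part number computed
       below is replicated for the 3 corresponding rows of the original matrix */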
  }
  /* local size of mat */
  mat_localsize = adj->rmap->n;
  /* check parameters */
  /* how many small subdomains we want from a given 'big' subdomain */
  if(!hpart->Nfineparts) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG," must set number of small subdomains for each big subdomain \n");
  if(!hpart->Ncoarseparts && !part->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE," must set either the number of coarse parts or the total number of parts \n");
  if(part->n && part->n%hpart->Nfineparts!=0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,
        " total number of parts %D is not divisible by number of fine parts %D\n",part->n,hpart->Nfineparts);
  if(part->n){
    hpart->Ncoarseparts = part->n/hpart->Nfineparts;
  } else {
    part->n = hpart->Ncoarseparts*hpart->Nfineparts;
  }
  /* we do not support this case currently, but this restriction should be
   * removed in the future
   * */
  if(hpart->Ncoarseparts>size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP," we do not support number of coarse parts %D > size %D \n",hpart->Ncoarseparts,size);
  MatPartitioningCreate(comm,&coarsePart);
  /* if the partitioning type has not been set yet, use ParMETIS by default */
  if (!hpart->coarseparttype){
#if defined(PETSC_HAVE_PARMETIS)
    MatPartitioningSetType(coarsePart,MATPARTITIONINGPARMETIS);
#else
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Requires PETSc to be installed with ParMetis or run with -mat_partitioning_hierarchical_coarseparttype partitiontype");
#endif
  } else {
    MatPartitioningSetType(coarsePart,hpart->coarseparttype);
  }
  MatPartitioningSetAdjacency(coarsePart,adj);
  MatPartitioningSetNParts(coarsePart, hpart->Ncoarseparts);
  /* copy over vertex weights */
  if(part->vertex_weights){
    PetscMalloc1(mat_localsize,&coarse_vertex_weights);
    PetscMemcpy(coarse_vertex_weights,part->vertex_weights,sizeof(PetscInt)*mat_localsize);
    MatPartitioningSetVertexWeights(coarsePart,coarse_vertex_weights);
  }
  /* Supporting part weights looks nontrivial; it is left for a future revision. */
  MatPartitioningApply(coarsePart,&hpart->coarseparts);
  MatPartitioningDestroy(&coarsePart);
  /* In the current implementation, destination should be the same as hpart->coarseparts;
   * this interface is preserved to deal with the case hpart->Ncoarseparts > size in the
   * future.
   * */
  MatPartitioningHierarchical_DetermineDestination(part,hpart->coarseparts,0,hpart->Ncoarseparts,&destination);
  /* assemble a submatrix for partitioning subdomains */
  MatPartitioningHierarchical_AssembleSubdomain(adj,destination,&sadj,&mapping);
  ISDestroy(&destination);
  PetscObjectGetComm((PetscObject)sadj,&scomm);
  /* create a fine partitioner */
  MatPartitioningCreate(scomm,&finePart);
  /* if the partitioning type has not been set, use ParMETIS by default */
  if(!hpart->fineparttype){
#if defined(PETSC_HAVE_PARMETIS)
    MatPartitioningSetType(finePart,MATPARTITIONINGPARMETIS);
#else
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Requires PETSc to be installed with ParMetis or run with -mat_partitioning_hierarchical_fineparttype partitiontype");
#endif
  } else {
    MatPartitioningSetType(finePart,hpart->fineparttype);
  }
  MatPartitioningSetAdjacency(finePart,sadj);
  MatPartitioningSetNParts(finePart, hpart->Nfineparts);
  MatPartitioningApply(finePart,&fineparts_temp);
  MatDestroy(&sadj);
  MatPartitioningDestroy(&finePart);
  MatPartitioningHierarchical_ReassembleFineparts(adj,fineparts_temp,mapping,&hpart->fineparts);
  ISDestroy(&fineparts_temp);
  ISLocalToGlobalMappingDestroy(&mapping);

  ISGetIndices(hpart->fineparts,&fineparts_indices);
  ISGetIndices(hpart->coarseparts,&coarseparts_indices);
  PetscMalloc1(bs*adj->rmap->n,&parts_indices);
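  /* Combine the two levels into a single global part number: row i of the adjacency graph
     goes to part coarseparts_indices[i]*Nfineparts + fineparts_indices[i]; for example, with
     Nfineparts = 4, coarse part 2 and fine part 1 give global part 9.  Since each vertex of a
     "reduced" adjacency graph stands for bs rows of the original matrix, the inner loop
     replicates the part number bs times. */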
  for(i=0; i<adj->rmap->n; i++){
    for(j=0; j<bs; j++){
      parts_indices[bs*i+j] = fineparts_indices[i]+coarseparts_indices[i]*hpart->Nfineparts;
    }
  }
  ISRestoreIndices(hpart->fineparts,&fineparts_indices);
  ISRestoreIndices(hpart->coarseparts,&coarseparts_indices);
  ISCreateGeneral(comm,bs*adj->rmap->n,parts_indices,PETSC_OWN_POINTER,partitioning);
  MatDestroy(&adj);
  return(0);
}

PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat adj, IS fineparts, ISLocalToGlobalMapping mapping, IS *sfineparts)
{
  PetscInt        *local_indices, *global_indices,*owners,*sfineparts_indices,localsize,i;
  const PetscInt  *ranges,*fineparts_indices;
  PetscMPIInt      rank;
  MPI_Comm         comm;
  PetscLayout      rmap;
  PetscSFNode     *remote;
  PetscSF          sf;
  PetscErrorCode   ierr;

  PetscObjectGetComm((PetscObject)adj,&comm);
  MPI_Comm_rank(comm,&rank);
  MatGetLayouts(adj,&rmap,NULL);
  ISGetLocalSize(fineparts,&localsize);
  PetscCalloc2(localsize,&global_indices,localsize,&local_indices);
  for(i=0; i<localsize; i++){
    local_indices[i] = i;
  }
  /* map local indices back to global so that we can permute the data globally */
  ISLocalToGlobalMappingApply(mapping,localsize,local_indices,global_indices);
  PetscCalloc1(localsize,&owners);
  /* find owners for global indices */
  for(i=0; i<localsize; i++){
    PetscLayoutFindOwner(rmap,global_indices[i],&owners[i]);
  }
  PetscLayoutGetRanges(rmap,&ranges);
  PetscCalloc1(ranges[rank+1]-ranges[rank],&sfineparts_indices);
  ISGetIndices(fineparts,&fineparts_indices);
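  /* Build a star forest that sends each locally computed fine-part value back to the process
     that owns the corresponding global row of adj: the leaves are the local entries of
     fineparts, and remote[] addresses each root as (owning rank, offset within that rank's
     ownership range).  The reduce below then fills sfineparts_indices on the owners. */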
  PetscSFCreate(comm,&sf);
  PetscCalloc1(localsize,&remote);
  for(i=0; i<localsize; i++){
    remote[i].rank  = owners[i];
    remote[i].index = global_indices[i]-ranges[owners[i]];
  }
  PetscSFSetType(sf,PETSCSFBASIC);
  /* not sure how to add prefix to sf */
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf,localsize,localsize,NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
  PetscSFReduceBegin(sf,MPIU_INT,fineparts_indices,sfineparts_indices,MPIU_REPLACE);
  PetscSFReduceEnd(sf,MPIU_INT,fineparts_indices,sfineparts_indices,MPIU_REPLACE);
  PetscSFDestroy(&sf);
  ISRestoreIndices(fineparts,&fineparts_indices);
  ISCreateGeneral(comm,ranges[rank+1]-ranges[rank],sfineparts_indices,PETSC_OWN_POINTER,sfineparts);
  PetscFree2(global_indices,local_indices);
  PetscFree(owners);
  return(0);
}

PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat adj,IS destination,Mat *sadj, ISLocalToGlobalMapping *mapping)
{
  IS              irows,icols;
  PetscInt        irows_ln;
  PetscMPIInt     rank;
  const PetscInt *irows_indices;
  MPI_Comm        comm;
  PetscErrorCode  ierr;

  PetscObjectGetComm((PetscObject)adj,&comm);
  MPI_Comm_rank(comm,&rank);
  /* figure out where data comes from */
  ISBuildTwoSided(destination,NULL,&irows);
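  /* irows now holds, on each process, the global row indices whose destination is this
     process; these rows make up this process's piece of the assembled 'big' subdomain */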
  ISDuplicate(irows,&icols);
  ISGetLocalSize(irows,&irows_ln);
  ISGetIndices(irows,&irows_indices);
  ISLocalToGlobalMappingCreate(comm,1,irows_ln,irows_indices,PETSC_COPY_VALUES,mapping);
  ISRestoreIndices(irows,&irows_indices);
  MatCreateSubMatrices(adj,1,&irows,&icols,MAT_INITIAL_MATRIX,&sadj);
  ISDestroy(&irows);
  ISDestroy(&icols);
  return(0);
}

PetscErrorCode MatPartitioningHierarchical_DetermineDestination(MatPartitioning part, IS partitioning, PetscInt pstart, PetscInt pend, IS *destination)
{
  MPI_Comm        comm;
  PetscMPIInt     rank,size,target;
  PetscInt        plocalsize,*dest_indices,i;
  const PetscInt *part_indices;
  PetscErrorCode  ierr;

  PetscObjectGetComm((PetscObject)part,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);
  if((pend-pstart)>size) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," number of parts in the range [%D, %D) should be smaller than or equal to the communicator size %D",pstart,pend,size);
  if(pstart>pend) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," pstart %D should be smaller than or equal to pend %D",pstart,pend);
  ISGetLocalSize(partitioning,&plocalsize);
  PetscCalloc1(plocalsize,&dest_indices);
  ISGetIndices(partitioning,&part_indices);
  for(i=0; i<plocalsize; i++){
    /* compute the target process for this row */
    target = part_indices[i]-pstart;
    /* mark out-of-range entries with -1 */
    if(part_indices[i]<pstart || part_indices[i]>=pend) target = -1;
    dest_indices[i] = target;
  }
  ISRestoreIndices(partitioning,&part_indices);
  ISCreateGeneral(comm,plocalsize,dest_indices,PETSC_OWN_POINTER,destination);
  return(0);
}

PetscErrorCode MatPartitioningView_Hierarchical(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;
  PetscMPIInt                   rank;
  PetscBool                     iascii;

  MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if(iascii){
    /* the type strings stay NULL until set explicitly or via the options database */
    PetscViewerASCIIPrintf(viewer," Fine partitioner %s \n",hpart->fineparttype ? hpart->fineparttype : "default");
    PetscViewerASCIIPrintf(viewer," Coarse partitioner %s \n",hpart->coarseparttype ? hpart->coarseparttype : "default");
    PetscViewerASCIIPrintf(viewer," Number of coarse parts %D \n",hpart->Ncoarseparts);
    PetscViewerASCIIPrintf(viewer," Number of fine parts %D \n",hpart->Nfineparts);
  }
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalGetFineparts(MatPartitioning part,IS *fineparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;

  *fineparts = hpart->fineparts;
  PetscObjectReference((PetscObject)hpart->fineparts);
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalGetCoarseparts(MatPartitioning part,IS *coarseparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;

  *coarseparts = hpart->coarseparts;
  PetscObjectReference((PetscObject)hpart->coarseparts);
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalSetNcoarseparts(MatPartitioning part, PetscInt Ncoarseparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;

  hpart->Ncoarseparts = Ncoarseparts;
  return(0);
}

PetscErrorCode MatPartitioningHierarchicalSetNfineparts(MatPartitioning part, PetscInt Nfineparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;

  hpart->Nfineparts = Nfineparts;
  return(0);
}

PetscErrorCode MatPartitioningSetFromOptions_Hierarchical(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  char                          value[1024];
  PetscBool                     flag = PETSC_FALSE;

  PetscOptionsHead(PetscOptionsObject,"Set hierarchical partitioning options");
  PetscOptionsString("-mat_partitioning_hierarchical_coarseparttype","coarse part type",NULL,NULL,value,1024,&flag);
  if(flag){
    PetscCalloc1(1024,&hpart->coarseparttype);
    PetscStrcpy(hpart->coarseparttype,value);
  }
  PetscOptionsString("-mat_partitioning_hierarchical_fineparttype","fine part type",NULL,NULL,value,1024,&flag);
  if(flag){
    PetscCalloc1(1024,&hpart->fineparttype);
    PetscStrcpy(hpart->fineparttype,value);
  }
  PetscOptionsInt("-mat_partitioning_hierarchical_Ncoarseparts","number of coarse parts",NULL,0,&hpart->Ncoarseparts,&flag);
  PetscOptionsInt("-mat_partitioning_hierarchical_Nfineparts","number of fine parts",NULL,1,&hpart->Nfineparts,&flag);
  PetscOptionsTail();
  return(0);
}

PetscErrorCode MatPartitioningDestroy_Hierarchical(MatPartitioning part)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical*)part->data;
  PetscErrorCode                ierr;

  if(hpart->coarseparttype) {PetscFree(hpart->coarseparttype);}
  if(hpart->fineparttype) {PetscFree(hpart->fineparttype);}
  ISDestroy(&hpart->fineparts);
  ISDestroy(&hpart->coarseparts);
  PetscFree(hpart);
  return(0);
}

/*MC
   MATPARTITIONINGHIERARCHPART - Creates a partitioning context via the hierarchical partitioning strategy.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_hierarchical_coarseparttype - partitioner used on the coarse level
.  -mat_partitioning_hierarchical_fineparttype - partitioner used on the fine level
.  -mat_partitioning_hierarchical_Ncoarseparts - number of coarse parts
-  -mat_partitioning_hierarchical_Nfineparts - number of fine parts per coarse part

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Hierarchical(MatPartitioning part)
{
  PetscErrorCode                ierr;
  MatPartitioning_Hierarchical *hpart;

  PetscNewLog(part,&hpart);
  part->data = (void*)hpart;

  hpart->fineparttype   = 0; /* fine level partitioner */
  hpart->coarseparttype = 0; /* coarse level partitioner */
  hpart->Nfineparts     = 1; /* by default the coarse partition is not subdivided further */
  hpart->Ncoarseparts   = 0; /* number of coarse parts (first level) */
  hpart->coarseparts    = 0;
  hpart->fineparts      = 0;

  part->ops->apply          = MatPartitioningApply_Hierarchical;
  part->ops->view           = MatPartitioningView_Hierarchical;
  part->ops->destroy        = MatPartitioningDestroy_Hierarchical;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Hierarchical;
  return(0);
}