Actual source code: ex183.c
petsc-3.6.1 2015-08-06
static char help[] =
"Example of extracting an array of MPI submatrices from a given MPI matrix.\n"
"This test can only be run in parallel.\n"
"\n";
/*T
   Concepts: Mat^mat submatrix, parallel
   Processors: n
T*/
#include <petscmat.h>
int main(int argc, char **args)
{
  Mat            A,*submats;
  MPI_Comm       subcomm;
  PetscMPIInt    rank,size,subrank,subsize,color;
  PetscInt       m,n,N,bs,rstart,rend,i,j,k,total_subdomains,hash,nsubdomains=1;
  PetscInt       nis,*cols,gnsubdomains,gsubdomainnums[1],gsubdomainperm[1],s,gs;
  PetscInt       *rowindices,*colindices,idx,rep;
  PetscScalar    *vals;
  IS             rowis[1],colis[1];
  PetscViewer    viewer;
  PetscBool      permute_indices,flg;
  PetscErrorCode ierr;
  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex183","Mat");
  m                = 5;
  PetscOptionsInt("-m","Local matrix size","MatSetSizes",m,&m,&flg);
  total_subdomains = size-1;
  PetscOptionsInt("-total_subdomains","Number of submatrices where 0 < n < comm size","MatGetSubMatricesMPI",total_subdomains,&total_subdomains,&flg);
  permute_indices  = PETSC_FALSE;
  PetscOptionsBool("-permute_indices","Whether to permute indices before breaking them into subdomains","ISCreateGeneral",permute_indices,&permute_indices,&flg);
  hash             = 7;
  PetscOptionsInt("-hash","Permutation factor, which has to be relatively prime to M = size*m (total matrix size)","ISCreateGeneral",hash,&hash,&flg);
  rep              = 2;
  PetscOptionsInt("-rep","Number of times to carry out submatrix extractions; currently only 1 & 2 are supported",NULL,rep,&rep,&flg);
  PetscOptionsEnd();
  if (total_subdomains > size) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of subdomains %D must not exceed comm size %D",total_subdomains,size);
  if (total_subdomains < 1 || total_subdomains > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subdomains must be > 0 and <= %D (comm size), got total_subdomains = %D",size,total_subdomains);
  if (rep != 1 && rep != 2) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid number of test repetitions: %D; must be 1 or 2",rep);
  viewer = PETSC_VIEWER_STDOUT_WORLD;
  /* Create logically sparse, but effectively dense matrix for easy verification of submatrix extraction correctness. */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,m,m,PETSC_DECIDE,PETSC_DECIDE);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetSize(A,NULL,&N);
  MatGetLocalSize(A,NULL,&n);
  MatGetBlockSize(A,&bs);
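  /* Preallocate for every supported format: PETSc ignores preallocation calls whose format does not
     match the actual type of A, so the same code works with -mat_type aij, baij or sbaij. */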
  MatSeqAIJSetPreallocation(A,n,NULL);
  MatMPIAIJSetPreallocation(A,n,NULL,N-n,NULL);
  MatSeqBAIJSetPreallocation(A,bs,n/bs,NULL);
  MatMPIBAIJSetPreallocation(A,bs,n/bs,NULL,(N-n)/bs,NULL);
  MatSeqSBAIJSetPreallocation(A,bs,n/bs,NULL);
  MatMPISBAIJSetPreallocation(A,bs,n/bs,NULL,(N-n)/bs,NULL);
  PetscMalloc2(N,&cols,N,&vals);
  MatGetOwnershipRange(A,&rstart,&rend);
  for (j = 0; j < N; ++j) cols[j] = j;
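  /* Each locally owned row i is filled with the values i*10000+j, so every entry encodes its
     global (row,column) position and the extracted submatrices can be verified by inspection. */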
  for (i=rstart; i<rend; i++) {
    for (j=0; j<N; ++j) {
      vals[j] = i*10000+j;
    }
    MatSetValues(A,1,&i,N,cols,vals,INSERT_VALUES);
  }
  PetscFree2(cols,vals);
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  PetscViewerASCIIPrintf(viewer,"Initial matrix:\n");
  MatView(A,viewer);
  /*
     Create subcomms and ISs so that each rank participates in one IS.
     The IS either coalesces adjacent rank indices (contiguous),
     or selects indices by scrambling them using a hash.
  */
  k     = size/total_subdomains + (size%total_subdomains>0); /* There are up to k ranks to a color. */
  color = rank/k;
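  /* Ranks with the same color are placed in the same subcommunicator by the split below,
     giving at most total_subdomains subcommunicators; each one owns a single row/column IS pair. */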
  MPI_Comm_split(PETSC_COMM_WORLD,color,rank,&subcomm);
  MPI_Comm_size(subcomm,&subsize);
  MPI_Comm_rank(subcomm,&subrank);
  MatGetOwnershipRange(A,&rstart,&rend);
  nis = 1;
  PetscMalloc2(rend-rstart,&rowindices,rend-rstart,&colindices);
  for (j = rstart; j < rend; ++j) {
    if (permute_indices) {
      idx = (j*hash);
    } else {
      idx = j;
    }
    rowindices[j-rstart] = idx%N;
    colindices[j-rstart] = (idx+m)%N;
  }
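  /* With -permute_indices the rows picked by this rank are j*hash (mod N). Since hash must be
     relatively prime to N, the map j -> j*hash mod N is a permutation of 0..N-1, so every global
     row still appears in exactly one subdomain; the column indices are the row indices shifted
     by m (mod N). */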
  ISCreateGeneral(subcomm,rend-rstart,rowindices,PETSC_COPY_VALUES,&rowis[0]);
  ISCreateGeneral(subcomm,rend-rstart,colindices,PETSC_COPY_VALUES,&colis[0]);
  ISSort(rowis[0]);
  ISSort(colis[0]);
  PetscFree2(rowindices,colindices);
  /*
    Now view the ISs. To avoid deadlock when viewing a list of objects on different subcomms,
    we need to obtain the global numbers of our local objects and wait for the corresponding global
    number to be viewed.
  */
  PetscViewerASCIIPrintf(viewer,"Subdomains");
  if (permute_indices) {
    PetscViewerASCIIPrintf(viewer," (hash=%D)",hash);
  }
  PetscViewerASCIIPrintf(viewer,":\n");
  PetscViewerFlush(viewer);
  nsubdomains = 1;
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)rowis,&gnsubdomains,gsubdomainnums);
  PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm);
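  /* gsubdomainnums[] holds the global sequence numbers assigned to this rank's ISs. Every rank
     walks the global numbers 0..gnsubdomains-1 in order and views an object only when the current
     global number is one of its own; the barrier at the end of each iteration keeps the ranks in
     step, so output from different subcomms is serialized without deadlock. */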
  for (gs=0,s=0; gs < gnsubdomains; ++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        PetscViewerGetSubcomm(viewer,PetscObjectComm((PetscObject)rowis[ss]),&subviewer);
        PetscViewerASCIIPrintf(subviewer,"Row IS %D\n",gs);
        ISView(rowis[ss],subviewer);
        PetscViewerFlush(subviewer);
        PetscViewerASCIIPrintf(subviewer,"Col IS %D\n",gs);
        ISView(colis[ss],subviewer);
        PetscViewerFlush(subviewer);
        PetscViewerRestoreSubcomm(viewer,PetscObjectComm((PetscObject)rowis[ss]),&subviewer);
        ++s;
      }
    }
    MPI_Barrier(PETSC_COMM_WORLD);
  }
  ISSort(rowis[0]);
  ISSort(colis[0]);
  nsubdomains = 1;
  MatGetSubMatricesMPI(A,nsubdomains,rowis,colis,MAT_INITIAL_MATRIX,&submats);
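  /* With MAT_INITIAL_MATRIX the submats array is allocated and each submatrix is created on the
     subcommunicator of its IS pair, so the extracted pieces are themselves (in general) parallel
     MPI matrices rather than sequential ones. */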
  /*
    Now view the matrices. To avoid deadlock when viewing a list of objects on different subcomms,
    we need to obtain the global numbers of our local objects and wait for the corresponding global
    number to be viewed.
  */
  PetscViewerASCIIPrintf(viewer,"Submatrices (repetition 1):\n");
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)submats,&gnsubdomains,gsubdomainnums);
  PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm);
  for (gs=0,s=0; gs < gnsubdomains; ++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        PetscViewerGetSubcomm(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);
        MatView(submats[ss],subviewer);
        PetscViewerFlush(subviewer);
        PetscViewerRestoreSubcomm(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);
        ++s;
      }
    }
    MPI_Barrier(PETSC_COMM_WORLD);
  }
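  /* With -rep 1 only a single extraction is tested and we jump straight to cleanup; otherwise the
     extraction is repeated, reusing the submatrices created above. */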
  if (rep == 1) goto cleanup;
  nsubdomains = 1;
  MatGetSubMatricesMPI(A,nsubdomains,rowis,colis,MAT_REUSE_MATRIX,&submats);
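  /* MAT_REUSE_MATRIX refills the previously created submatrices instead of allocating new ones,
     exercising the reuse code path of MatGetSubMatricesMPI. */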
  /*
    Now view the matrices. To avoid deadlock when viewing a list of objects on different subcomms,
    we need to obtain the global numbers of our local objects and wait for the corresponding global
    number to be viewed.
  */
  PetscViewerASCIIPrintf(viewer,"Submatrices (repetition 2):\n");
  for (s = 0; s < nsubdomains; ++s) gsubdomainperm[s] = s;
  PetscObjectsListGetGlobalNumbering(PETSC_COMM_WORLD,1,(PetscObject*)submats,&gnsubdomains,gsubdomainnums);
  PetscSortIntWithPermutation(nsubdomains,gsubdomainnums,gsubdomainperm);
  for (gs=0,s=0; gs < gnsubdomains; ++gs) {
    if (s < nsubdomains) {
      PetscInt ss;
      ss = gsubdomainperm[s];
      if (gs == gsubdomainnums[ss]) { /* Global subdomain gs being viewed is my subdomain with local number ss. */
        PetscViewer subviewer = NULL;
        PetscViewerGetSubcomm(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);
        MatView(submats[ss],subviewer);
        PetscViewerFlush(subviewer);
        PetscViewerRestoreSubcomm(viewer,PetscObjectComm((PetscObject)submats[ss]),&subviewer);
        ++s;
      }
    }
    MPI_Barrier(PETSC_COMM_WORLD);
  }
cleanup:
  for (k=0; k<nsubdomains; ++k) {
    MatDestroy(submats+k);
  }
  PetscFree(submats);
  for (k=0; k<nis; ++k) {
    ISDestroy(rowis+k);
    ISDestroy(colis+k);
  }
  MPI_Comm_free(&subcomm); /* Free the subcommunicator created by MPI_Comm_split. */
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}