Actual source code: ex40.c
petsc-3.10.5 2019-03-28
static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";

#include <petscmat.h>
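
/* Gather a distributed index set onto every process and split the result into
   one sequential IS per rank, preserving each rank's local size. The array of
   index sets is allocated here; the caller destroys the individual ISs and
   frees the array. */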
PetscErrorCode ISAllGatherDisjoint(IS iis, IS** ois)
{
  IS             *is2,is;
  const PetscInt *idxs;
  PetscInt       i,ls,*sizes;
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)iis),&size);
  PetscMalloc1(size,&is2);
  PetscMalloc1(size,&sizes);
  ISGetLocalSize(iis,&ls);
  /* we don't have a public ISGetLayout */
  MPI_Allgather(&ls,1,MPIU_INT,sizes,1,MPIU_INT,PetscObjectComm((PetscObject)iis));
  ISAllGather(iis,&is);
  ISGetIndices(is,&idxs);
  for (i = 0, ls = 0; i < size; i++) {
    ISCreateGeneral(PETSC_COMM_SELF,sizes[i],idxs+ls,PETSC_COPY_VALUES,&is2[i]);
    ls += sizes[i];
  }
  ISRestoreIndices(is,&idxs);
  ISDestroy(&is);
  PetscFree(sizes);
  *ois = is2;
  return(0);
}

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,ndpar,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscBool      flg,useND = PETSC_FALSE;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);
  PetscOptionsGetBool(NULL,NULL,"-nested_dissection",&useND,NULL);

  /* Read matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  MatSetFromOptions(A);
  PetscViewerDestroy(&fd);

  /* Read the matrix again as a sequential matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  MatSetFromOptions(B);
  PetscViewerDestroy(&fd);
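
  /* A (parallel) and B (sequential) now hold the same matrix, so the
     overlapped subdomains computed from each should coincide; this is
     verified at the end of the program. */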

  /* Create the IS corresponding to subdomains */
  if (useND) {
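    /* Nested-dissection path: one subdomain per process. Partition A, turn the
       resulting rank map into a local IS on each process, and gather the
       per-rank pieces so the same decomposition can be handed to the
       sequential matrix B. */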
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    ndpar = 1;
    MPI_Comm_size(PETSC_COMM_WORLD,&size);
    nd    = (PetscInt)size;
    PetscMalloc1(ndpar,&is1);
    MatPartitioningCreate(PETSC_COMM_WORLD,&part);
    MatPartitioningSetAdjacency(part,A);
    MatPartitioningSetFromOptions(part);
    MatPartitioningApplyND(part,&ndmap);
    MatPartitioningDestroy(&part);
    ISBuildTwoSided(ndmap,NULL,&is1[0]);
    ISDestroy(&ndmap);
    ISAllGatherDisjoint(is1[0],&is2);
  } else {
    /* Create the random Index Sets */
    PetscMalloc1(nd,&is1);
    PetscMalloc1(nd,&is2);

    MatGetSize(A,&m,&n);
    PetscRandomCreate(PETSC_COMM_SELF,&r);
    PetscRandomSetFromOptions(r);
    for (i=0; i<nd; i++) {
      PetscRandomGetValue(r,&rand);
      start = (PetscInt)(rand*m);
      PetscRandomGetValue(r,&rand);
      end   = (PetscInt)(rand*m);
      lsize = end - start;
      if (start > end) { start = end; lsize = -lsize;}
      ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);
      ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);
    }
    ndpar = nd;
    PetscRandomDestroy(&r);
  }
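
  /* Increase the overlap of the same initial subdomains on the parallel
     matrix A and on the sequential matrix B */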
  MatIncreaseOverlap(A,ndpar,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);
  if (useND) {
    IS *is;

    ISAllGatherDisjoint(is1[0],&is);
    ISDestroy(&is1[0]);
    PetscFree(is1);
    is1 = is;
  }

  /* Now see if the serial and parallel case have the same answers */
  for (i=0; i<nd; ++i) {
    ISEqual(is1[i],is2[i],&flg);
    if (!flg) {
      ISViewFromOptions(is1[i],NULL,"-err_view");
      ISViewFromOptions(is2[i],NULL,"-err_view");
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%D, flg =%d\n",rank,i,(int)flg);
    }
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
#endif
  ierr = PetscFinalize();
  return ierr;
}
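
/*
   Example invocation (a sketch; the matrix path mirrors the -f argument used by
   the test harness below, and any matrix stored in PETSc binary format works):

     mpiexec -n 3 ./ex40 -f ${PETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -nd 7 -ov 2
*/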

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 1
        args: -nd 7
      test:
        requires: parmetis
        suffix: 1_nd
        args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 2
        args: -nd 7
      test:
        requires: parmetis
        suffix: 2_nd
        args: -nested_dissection -mat_partitioning_type parmetis

TEST*/