Actual source code: ex42.c  (petsc-3.13.6, 2020-09-29)
static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubMatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
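
/*
  Example invocation (mirrors the first test case in the TEST section at the
  bottom of this file; assumes the PETSc data files are available):

      mpiexec -n 3 ./ex42 -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2
*/
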
#include <petscmat.h>
int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,i,j,lsize,m,n,*idx,bs;
  PetscMPIInt    rank,size;
  PetscBool      flg;
  Mat            A,B,*submatA,*submatB;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscBool      test_unsorted = PETSC_FALSE;
  PetscScalar    rand;
  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,NULL);
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);
  PetscOptionsGetBool(NULL,NULL,"-test_unsorted",&test_unsorted,NULL);

  /* Read matrix A (parallel) */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATAIJ);
  MatSetFromOptions(A);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  /* Read the same matrix as a seq matrix B */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatSetFromOptions(B);
  MatLoad(B,fd);
  PetscViewerDestroy(&fd);
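
  /* A is distributed over PETSC_COMM_WORLD while B is a complete sequential
     copy held by every process, so the parallel and sequential results can be
     compared entry by entry at the end of the program */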

  MatGetBlockSize(A,&bs);

  /* Create the random number generator */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);
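  /* By default PetscRandomGetValue() produces values in [0,1); below they are
     used both to size each subdomain and to shuffle the index array */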

  /* Create the IS corresponding to subdomains */
  PetscMalloc1(nd,&is1);
  PetscMalloc1(nd,&is2);
  PetscMalloc1(m,&idx);
  for (i = 0; i < m; i++) {idx[i] = i;} /* start from the identity permutation */

  /* Create the random Index Sets */
  for (i=0; i<nd; i++) {
    /* Skip a few, so that the IS on different procs are different */
    for (j=0; j<rank; j++) {
      PetscRandomGetValue(r,&rand);
    }
    PetscRandomGetValue(r,&rand);
    lsize = (PetscInt)(rand*(m/bs)); /* number of blocks in this subdomain */
    /* Partial Fisher-Yates shuffle: move lsize randomly chosen blocks of size
       bs to the front of idx[], swapping whole blocks at a time */
    for (j=0; j<lsize; j++) {
      PetscInt k, swap, l;

      PetscRandomGetValue(r,&rand);
      k = j + (PetscInt)(rand*((m/bs)-j));
      for (l = 0; l < bs; l++) {
        swap        = idx[bs*j+l];
        idx[bs*j+l] = idx[bs*k+l];
        idx[bs*k+l] = swap;
      }
    }
    if (!test_unsorted) {PetscSortInt(lsize*bs,idx);}
    ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is1+i);
    ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is2+i);
    ISSetBlockSize(is1[i],bs);
    ISSetBlockSize(is2[i],bs);
  }
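
  /* At this point is1[] and is2[] hold identical index sets: is1[] will be
     used with the parallel matrix A and is2[] with the sequential copy B */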
  if (!test_unsorted) {
    /* Grow each subdomain by ov levels of overlap on both matrices */
    MatIncreaseOverlap(A,nd,is1,ov);
    MatIncreaseOverlap(B,nd,is2,ov);

    /* Sort so both sets of index sets list their entries in the same order */
    for (i=0; i<nd; ++i) {
      ISSort(is1[i]);
      ISSort(is2[i]);
    }
  }

  MatCreateSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA);
  MatCreateSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB);
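  /* Each call returns an array of nd sequential matrices, one per index set,
     local to the calling process */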

  /* Now see if the serial and parallel case have the same answers */
  for (i=0; i<nd; ++i) {
    MatEqual(submatA[i],submatB[i],&flg);
    if (!flg) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"%D-th parallel submatA != seq submatB",i);
  }

  /* Free Allocated Memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  MatDestroySubMatrices(nd,&submatA);
  MatDestroySubMatrices(nd,&submatB);
  PetscRandomDestroy(&r);
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFree(idx);

  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/