Actual source code: ex41.c (petsc-3.10.5, 2019-03-28)
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0 amount of overlap between domains\n\n";
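
/*
   Example invocation (illustrative; the executable name and matrix file follow
   the TEST block at the bottom of this file, but any matrix stored in PETSc
   binary format can be used):

       mpiexec -n 3 ./ex41 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1
*/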

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov=1,i,j,m,n,*idx,lsize;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);CHKERRQ(ierr);

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Read the same matrix again as a sequential (SEQAIJ) matrix */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad(B,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
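
  /* A (MPIAIJ) and B (SEQAIJ) now hold the same matrix, once distributed over
     all ranks and once stored entirely on each rank, so the parallel and
     sequential MatIncreaseOverlap() code paths can be compared below. */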

  /* Create the random number generator */
  ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&r);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
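  /* By default the generated values lie in [0,1), so rand*m below truncates to
     a valid global row index in the range [0,m). */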

  /* Create the IS corresponding to subdomains */
  ierr = PetscMalloc1(nd,&is1);CHKERRQ(ierr);
  ierr = PetscMalloc1(nd,&is2);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
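    /* Discard `rank` values so that each MPI rank continues from a different
       point in the random stream and therefore builds different index sets */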
    for (j=0; j<rank; j++) {
      ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    }
    /* Random subdomain size in [0,m) */
    ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    lsize = (PetscInt)(rand*m);
    /* lsize random global indices in [0,m) */
    for (j=0; j<lsize; j++) {
      ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
      idx[j] = (PetscInt)(rand*m);
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);CHKERRQ(ierr);
  }

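  /* MatIncreaseOverlap() replaces each index set with a larger one that also
     contains the rows reachable within `ov` levels of matrix connectivity,
     i.e. the overlap used by domain decomposition methods such as additive
     Schwarz.  It is applied to the parallel matrix A and to the sequential
     copy B so the two implementations can be checked against each other. */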
  ierr = MatIncreaseOverlap(A,nd,is1,ov);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap(B,nd,is2,ov);CHKERRQ(ierr);

  /* Now see if the sequential and parallel cases give the same answer */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    ierr = ISEqual(is1[i],is2[i],&flg);CHKERRQ(ierr);
    ierr = ISGetSize(is1[i],&sz1);CHKERRQ(ierr);
    ierr = ISGetSize(is2[i],&sz2);CHKERRQ(ierr);
    if (!flg) SETERRQ5(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%D, flg =%d sz1 = %D sz2 = %D\n",rank,i,(int)flg,sz1,sz2);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ierr = ISDestroy(&is1[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&is2[i]);CHKERRQ(ierr);
  }
  ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
  ierr = PetscFree(is1);CHKERRQ(ierr);
  ierr = PetscFree(is2);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
#endif
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/