Actual source code: ex41.c
petsc-3.4.5 2014-06-29
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
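/* Example invocation (the matrix file name is illustrative; any binary PETSc
   matrix works, e.g. one from $PETSC_DIR/share/petsc/datafiles/matrices):
       mpiexec -n 2 ./ex41 -f <matrix_file> -nd 4 -ov 2
*/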
#include <petscmat.h>
int main(int argc,char **args)
{
  PetscInt       nd = 2,ov = 1,i,j,m,n,*idx,lsize;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;
  PetscInitialize(&argc,&args,(char*)0,help);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,"-f",file,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,"-nd",&nd,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,"-ov",&ov,NULL);CHKERRQ(ierr);
  /* Read the matrix as a parallel MPIAIJ matrix */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
  /* Read the same matrix again as a sequential matrix on every process */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad(B,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
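  /* A is distributed across the processes of PETSC_COMM_WORLD, while every
     process holds a complete copy of the matrix in B; this lets the parallel
     and sequential results of MatIncreaseOverlap() be compared directly below */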
  /* Get the matrix dimensions and create the random number generator */
  ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&r);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
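  /* PetscRandomSetFromOptions() allows the generator to be configured at run
     time, e.g. with -random_type (option name as assumed for PETSc 3.4; check
     the installed version's documentation) */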
  /* Create the index sets corresponding to the subdomains */
  ierr = PetscMalloc(nd*sizeof(IS),&is1);CHKERRQ(ierr);
  ierr = PetscMalloc(nd*sizeof(IS),&is2);CHKERRQ(ierr);
  ierr = PetscMalloc(m*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Advance the random stream by rank values so that each process
       generates different subdomains */
    for (j=0; j<rank; j++) {
      ierr = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    }
    /* Pick a random subdomain size in [0,m), then fill it with random row indices */
    ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    lsize = (PetscInt)(rand*m);
    for (j=0; j<lsize; j++) {
      ierr   = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
      idx[j] = (PetscInt)(rand*m);
    }
    /* is1[i] and is2[i] start as identical copies of the same subdomain */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);CHKERRQ(ierr);
  }
  ierr = MatIncreaseOverlap(A,nd,is1,ov);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap(B,nd,is2,ov);CHKERRQ(ierr);
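  /* Each index set has now been grown by ov levels of overlap: each level adds
     the rows coupled to the current set through the nonzero structure of the
     matrix, so the parallel expansion (on A) and the sequential expansion
     (on B) should produce identical sets */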
  /* Now see if the serial and parallel cases have the same answers */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    ierr = ISEqual(is1[i],is2[i],&flg);CHKERRQ(ierr);
    ierr = ISGetSize(is1[i],&sz1);CHKERRQ(ierr);
    ierr = ISGetSize(is2[i],&sz2);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF,"[%d], i=%D, flg =%d sz1 = %D sz2 = %D\n",rank,i,(int)flg,sz1,sz2);CHKERRQ(ierr);
    /* ISView(is1[i],PETSC_VIEWER_STDOUT_SELF);
       ISView(is2[i],PETSC_VIEWER_STDOUT_SELF); */
  }
  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ierr = ISDestroy(&is1[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&is2[i]);CHKERRQ(ierr);
  }
  ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
  ierr = PetscFree(is1);CHKERRQ(ierr);
  ierr = PetscFree(is2);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
  ierr = PetscFinalize();
#endif
  return 0;
}