Actual source code: ex40.c
petsc-3.8.4 2018-03-24
static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
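
/*
   A minimal sketch of a typical invocation, assuming the executable was built
   as ex40 and <matrix_file> is any matrix stored in PETSc binary format:

       mpiexec -n 2 ./ex40 -f <matrix_file> -nd 4 -ov 2
*/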
#include <petscmat.h>
int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  ierr = PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);CHKERRQ(ierr);
  /* Read the matrix from the binary file as a parallel MPIAIJ matrix */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
  /* Read the matrix again as a sequential SEQAIJ matrix, once on each process */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = MatLoad(B,fd);CHKERRQ(ierr);
  ierr = MatSetFromOptions(B);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
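
  /* A is distributed across all processes while B is a full serial copy held
     by each process, so the parallel and sequential code paths of
     MatIncreaseOverlap() can be exercised on the same matrix and compared */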
  /* Create the IS corresponding to subdomains */
  ierr = PetscMalloc1(nd,&is1);CHKERRQ(ierr);
  ierr = PetscMalloc1(nd,&is2);CHKERRQ(ierr);
  /* Create the random index sets; is1[i] and is2[i] start out identical */
  ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&r);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
  for (i=0; i<nd; i++) {
    ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    start = (PetscInt)(rand*m);
    ierr  = PetscRandomGetValue(r,&rand);CHKERRQ(ierr);
    end   = (PetscInt)(rand*m);
    lsize = end - start;
    if (start > end) { start = end; lsize = -lsize;}
    ierr = ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);CHKERRQ(ierr);
  }
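
  /* MatIncreaseOverlap() replaces each index set with a larger one that also
     contains the rows coupled to it through the matrix graph, applied ov
     times; is1 exercises the parallel code path and is2 the sequential one */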
  ierr = MatIncreaseOverlap(A,nd,is1,ov);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap(B,nd,is2,ov);CHKERRQ(ierr);

  /* Now see whether the sequential and parallel cases give the same answers */
  for (i=0; i<nd; ++i) {
    ierr = ISEqual(is1[i],is2[i],&flg);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF,"proc:[%d], i=%D, flg=%d\n",rank,i,(int)flg);CHKERRQ(ierr);
  }
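
  /* If the two code paths agree, every line printed above reports flg=1 */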
  /* Free the allocated memory */
  for (i=0; i<nd; ++i) {
    ierr = ISDestroy(&is1[i]);CHKERRQ(ierr);
    ierr = ISDestroy(&is2[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(is1);CHKERRQ(ierr);
  ierr = PetscFree(is2);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
#endif
  ierr = PetscFinalize();
  return ierr;
}