Actual source code: ex42.c

petsc-3.4.5 2014-06-29
static char help[] = "Tests MatIncreaseOverlap() and MatGetSubMatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
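
/*
   Illustrative usage (a hedged sketch, not part of the original example): any
   PETSc binary matrix file can be supplied through -f, e.g.

       mpiexec -n 3 ./ex42 -f <binary_matrix_file> -nd 5 -ov 2

   where <binary_matrix_file> is a placeholder for an actual matrix file such
   as those under $PETSC_DIR/share/petsc/datafiles/matrices.
*/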

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov=1,i,j,lsize,m,n,*idx;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B,*submatA,*submatB;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  PetscInitialize(&argc,&args,(char*)0,help);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,"-f",file,PETSC_MAX_PATH_LEN,NULL);
  PetscOptionsGetInt(NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,"-ov",&ov,NULL);

  /* Read the matrix from the binary file as a parallel MPIAIJ matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  /* Read the same matrix again as a sequential SEQAIJ matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  PetscViewerDestroy(&fd);

  /* Get the matrix size and create the random number generator */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);

  /* Allocate the arrays of index sets (one per subdomain) and a work array of indices */
  PetscMalloc(nd*sizeof(IS),&is1);
  PetscMalloc(nd*sizeof(IS),&is2);
  PetscMalloc(m*sizeof(PetscInt),&idx);

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Skip a few random values so that the index sets on different processes are different */
    for (j=0; j<rank; j++) {
      PetscRandomGetValue(r,&rand);
    }
    PetscRandomGetValue(r,&rand);
    lsize = (PetscInt)(rand*m);
    for (j=0; j<lsize; j++) {
      PetscRandomGetValue(r,&rand);
      idx[j] = (PetscInt)(rand*m);
    }
    PetscSortInt(lsize,idx);
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);
  }

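  /* Grow each subdomain index set by 'ov' levels of overlap, on the parallel
     matrix A and on its sequential copy B */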
  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);

  for (i=0; i<nd; ++i) {
    ISSort(is1[i]);
    ISSort(is2[i]);
  }

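  /* Extract the sequential submatrices defined by the (overlapped) index sets,
     from both the parallel and the sequential matrix */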
  MatGetSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA);
  MatGetSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB);

  /* Now see if the serial and parallel cases give the same answers */
  for (i=0; i<nd; ++i) {
    MatEqual(submatA[i],submatB[i],&flg);
    PetscPrintf(PETSC_COMM_SELF,"proc:[%d], i=%D, flg = %d\n",rank,i,(int)flg);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
    MatDestroy(&submatA[i]);
    MatDestroy(&submatB[i]);
  }
  PetscFree(submatA);
  PetscFree(submatB);
  PetscRandomDestroy(&r);
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFree(idx);

  PetscFinalize();
#endif
  return 0;
}
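
The example requires a matrix stored in PETSc binary format for the -f option.
Below is a minimal, illustrative sketch of a separate helper program that writes
such a file; the tridiagonal 1-D Laplacian and the output name matrix.dat are
arbitrary choices for demonstration and are not part of ex42.c.

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat         A;
  PetscViewer viewer;
  PetscInt    i,rstart,rend,n = 100,col[3];
  PetscScalar value[3];

  PetscInitialize(&argc,&args,(char*)0,NULL);

  /* Assemble a 1-D Laplacian as a sample matrix */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&rstart,&rend);
  value[0] = -1.0; value[1] = 2.0; value[2] = -1.0;
  for (i=rstart; i<rend; i++) {
    if (i == 0) {
      col[0] = 0; col[1] = 1;
      MatSetValues(A,1,&i,2,col,&value[1],INSERT_VALUES);
    } else if (i == n-1) {
      col[0] = n-2; col[1] = n-1;
      MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
    } else {
      col[0] = i-1; col[1] = i; col[2] = i+1;
      MatSetValues(A,1,&i,3,col,value,INSERT_VALUES);
    }
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /* Write the matrix in PETSc binary format; the resulting file can be
     passed to ex42 through -f */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_WRITE,&viewer);
  MatView(A,viewer);
  PetscViewerDestroy(&viewer);

  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

The generated file could then be used, for example, as
mpiexec -n 2 ./ex42 -f matrix.dat -nd 4 -ov 1.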