Actual source code: ex40.c

petsc-3.7.3 2016-08-01
static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
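/* Example invocation (the matrix file below is a placeholder; any matrix stored
   in PETSc binary format can be used):
       mpiexec -n 2 ./ex40 -f <petsc_binary_matrix> -nd 4 -ov 1
*/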

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov=1,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  PetscInitialize(&argc,&args,(char*)0,help);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);
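  /* If -nd or -ov is not given on the command line, the defaults set above (nd = 2, ov = 1) are kept */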

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  MatSetFromOptions(A);
  PetscViewerDestroy(&fd);

  /* Read the matrix again as a sequential matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  MatSetFromOptions(B);
  PetscViewerDestroy(&fd);
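  /* B now holds the entire matrix redundantly on each process; it serves as the
     sequential reference against which the parallel result is compared below */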

  /* Create the IS corresponding to subdomains */
  PetscMalloc1(nd,&is1);
  PetscMalloc1(nd,&is2);

  /* Create the random Index Sets */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);
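  /* Each subdomain is a random contiguous range of global rows; identical copies
     go into is1 (used with the parallel matrix A) and is2 (used with the
     sequential matrix B) so both code paths start from the same subdomains */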
  for (i=0; i<nd; i++) {
    PetscRandomGetValue(r,&rand);
    start = (PetscInt)(rand*m);
    PetscRandomGetValue(r,&rand);
    end   = (PetscInt)(rand*m);
    lsize =  end - start;
    if (start > end) { start = end; lsize = -lsize;}
    ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);
    ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);
  }
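  /* Grow each subdomain by ov levels of overlap: A exercises the parallel
     MatIncreaseOverlap() code path, B the sequential one */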
  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);
  /* Now see if the serial and parallel case have the same answers */
  for (i=0; i<nd; ++i) {
    ISEqual(is1[i],is2[i],&flg);
    PetscPrintf(PETSC_COMM_SELF,"proc:[%d], i=%D, flg =%d\n",rank,i,(int)flg);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  PetscRandomDestroy(&r);
  MatDestroy(&A);
  MatDestroy(&B);

  PetscFinalize();
#endif
  return 0;
}