Actual source code: ex41.c

petsc-3.13.6 2020-09-29

static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0 amount of overlap between domains\n\n";
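
/* Example invocation (paths are illustrative; any binary PETSc matrix file works),
   mirroring the test case registered at the bottom of this file:
       mpiexec -n 3 ./ex41 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1 */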

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,i,j,m,n,*idx,lsize;
  PetscMPIInt    rank;
  PetscBool      flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  PetscRandom    r;
  PetscScalar    rand;
  IS             *is1,*is2;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,NULL);
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);

  /* Read the matrix as a parallel MPIAIJ matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  /* Read the same matrix again as a sequential matrix on each process */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  PetscViewerDestroy(&fd);
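
  /* Each process now holds the distributed matrix A and a complete sequential
     copy B of the same matrix; B serves as the serial reference below */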

  /* Create the random number generator */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);

  /* Create the IS corresponding to subdomains */
  PetscMalloc1(nd,&is1);
  PetscMalloc1(nd,&is2);
  PetscMalloc1(m,&idx);

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Advance the random stream by 'rank' values so the subdomain sizes and
       indices differ from process to process */
    for (j=0; j<rank; j++) {
      PetscRandomGetValue(r,&rand);
    }
    PetscRandomGetValue(r,&rand);
    lsize = (PetscInt)(rand*m);
    for (j=0; j<lsize; j++) {
      PetscRandomGetValue(r,&rand);
      idx[j] = (PetscInt)(rand*m);
    }
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);
  }
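
  /* is1[] and is2[] start out identical; the overlap of is1[] is grown using
     the parallel matrix A, that of is2[] using the sequential copy B */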

  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);

  /* Now see if the serial and parallel cases give the same answers */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    ISEqual(is1[i],is2[i],&flg);
    ISGetSize(is1[i],&sz1);
    ISGetSize(is2[i],&sz2);
    if (!flg) SETERRQ5(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%D, flg = %d sz1 = %D sz2 = %D\n",rank,i,(int)flg,sz1,sz2);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscRandomDestroy(&r);
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFree(idx);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/