Actual source code: ex42.c

petsc-3.10.5 2019-03-28

static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubMatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
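/*
  One way to run this example (this matrix file and argument set are taken
  from the test harness at the bottom of this file; any PETSc binary matrix
  file works):

      mpiexec -n 3 ./ex42 -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2
*/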

#include <petscmat.h>

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov=1,i,j,lsize,m,n,*idx,bs;
  PetscMPIInt    rank, size;
  PetscBool      flg;
  Mat            A,B,*submatA,*submatB;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscBool      test_unsorted = PETSC_FALSE;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else


  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,NULL);
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);
  PetscOptionsGetBool(NULL,NULL,"-test_unsorted",&test_unsorted,NULL);

  /* Read matrix A as a parallel AIJ matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATAIJ);
  MatSetFromOptions(A);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  /* Read the same matrix as a seq matrix B */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatSetFromOptions(B);
  MatLoad(B,fd);
  PetscViewerDestroy(&fd);

  MatGetBlockSize(A,&bs);
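  /* bs is typically 1 for plain AIJ; with -mat_type baij (used by the
     unsorted_baij tests below) it is the matrix block size, and the index
     sets below are generated block-by-block to respect it */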

  /* Create the random number generator */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);
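  /* PETSc's default random interval is [0,1), so the casts below yield
     block counts in [0, m/bs) and swap targets in [j, m/bs) */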

  /* Create the IS corresponding to subdomains */
  PetscMalloc1(nd,&is1);
  PetscMalloc1(nd,&is2);
  PetscMalloc1(m,&idx);
  for (i = 0; i < m; i++) {idx[i] = i;}

  /* Create the random index sets */
  for (i=0; i<nd; i++) {
    /* Skip a few values, so that the IS on different procs are different */
    for (j=0; j<rank; j++) {
      PetscRandomGetValue(r,&rand);
    }
    PetscRandomGetValue(r,&rand);
    lsize = (PetscInt)(rand*(m/bs));
    /* Fisher-Yates-style shuffle: swap a random later block into position j,
       so the first lsize blocks of idx become a random sample */
    for (j=0; j<lsize; j++) {
      PetscInt k, swap, l;

      PetscRandomGetValue(r,&rand);
      k      = j + (PetscInt)(rand*((m/bs)-j));
      for (l = 0; l < bs; l++) {
        swap        = idx[bs*j+l];
        idx[bs*j+l] = idx[bs*k+l];
        idx[bs*k+l] = swap;
      }
    }
    if (!test_unsorted) {PetscSortInt(lsize*bs,idx);}
    ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is1+i);
    ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is2+i);
    ISSetBlockSize(is1[i],bs);
    ISSetBlockSize(is2[i],bs);
  }
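  /* MatIncreaseOverlap() replaces each IS by a larger one that adds 'ov'
     levels of overlap; the parallel matrix A and its sequential copy B
     should yield the same enlarged sets, possibly in a different order,
     hence the ISSort() calls below */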

  if (!test_unsorted) {
    MatIncreaseOverlap(A,nd,is1,ov);
    MatIncreaseOverlap(B,nd,is2,ov);

    for (i=0; i<nd; ++i) {
      ISSort(is1[i]);
      ISSort(is2[i]);
    }
  }

  MatCreateSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA);
  MatCreateSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB);
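  /* Each call returns an array of nd sequential submatrices, one per IS;
     extracting from parallel A must agree with extracting from seq B */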

  /* Now see if the serial and parallel cases have the same answers */
  for (i=0; i<nd; ++i) {
    MatEqual(submatA[i],submatB[i],&flg);
    if (!flg) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"%D-th parallel submatA != seq submatB",i);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  MatDestroySubMatrices(nd,&submatA);
  MatDestroySubMatrices(nd,&submatB);

  PetscRandomDestroy(&r);
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFree(idx);

#endif
  ierr = PetscFinalize();
  return ierr;
}



/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/