Actual source code: ex40.c

petsc-3.14.6 2021-03-30

static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
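/* Example run (illustrative only; see the TEST block at the bottom of the file for
   the exact configurations exercised by the test harness):
     mpiexec -n 3 ./ex40 -f $PETSC_DIR/share/petsc/datafiles/matrices/ns-real-int32-float64 -nd 7 -ov 2
*/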

#include <petscmat.h>

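/* Gather a (possibly parallel) index set onto every process, split into one
   sequential IS per rank: the i-th entry of the returned array contains the
   indices contributed by rank i.  The caller destroys the returned index sets
   and frees the array. */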
PetscErrorCode ISAllGatherDisjoint(IS iis, IS** ois)
{
  IS             *is2,is;
  const PetscInt *idxs;
  PetscInt       i, ls,*sizes;
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)iis),&size);
  PetscMalloc1(size,&is2);
  PetscMalloc1(size,&sizes);
  ISGetLocalSize(iis,&ls);
  /* we don't have a public ISGetLayout */
  MPI_Allgather(&ls,1,MPIU_INT,sizes,1,MPIU_INT,PetscObjectComm((PetscObject)iis));
  ISAllGather(iis,&is);
  ISGetIndices(is,&idxs);
  for (i = 0, ls = 0; i < size; i++) {
    ISCreateGeneral(PETSC_COMM_SELF,sizes[i],idxs+ls,PETSC_COPY_VALUES,&is2[i]);
    ls += sizes[i];
  }
  ISRestoreIndices(is,&idxs);
  ISDestroy(&is);
  PetscFree(sizes);
  *ois = is2;
  return(0);
}

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscInt       nd = 2,ov = 1,ndpar,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscBool      flg, useND = PETSC_FALSE;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL);
  PetscOptionsGetBool(NULL,NULL,"-nested_dissection",&useND,NULL);

  /* Read matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  MatSetFromOptions(A);
  PetscViewerDestroy(&fd);

  /* Read the matrix again as a sequential matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  MatSetFromOptions(B);
  PetscViewerDestroy(&fd);

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

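    /* One subdomain per process, obtained from a nested-dissection
       partitioning of A; is1[0] holds the rows of this process's subdomain */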
    ndpar = 1;
    MPI_Comm_size(PETSC_COMM_WORLD,&size);
    nd   = (PetscInt)size;
    PetscMalloc1(ndpar,&is1);
    MatPartitioningCreate(PETSC_COMM_WORLD,&part);
    MatPartitioningSetAdjacency(part,A);
    MatPartitioningSetFromOptions(part);
    MatPartitioningApplyND(part,&ndmap);
    MatPartitioningDestroy(&part);
    ISBuildTwoSided(ndmap,NULL,&is1[0]);
    ISDestroy(&ndmap);
    ISAllGatherDisjoint(is1[0],&is2);
  } else {
    /* Create the random Index Sets */
    PetscMalloc1(nd,&is1);
    PetscMalloc1(nd,&is2);

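    /* Each subdomain is a random contiguous range of global rows; identical
       copies are stored in is1 (used with the parallel A) and in is2 (used
       with the sequential B) */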
    MatGetSize(A,&m,&n);
    PetscRandomCreate(PETSC_COMM_SELF,&r);
    PetscRandomSetFromOptions(r);
    for (i=0; i<nd; i++) {
      PetscRandomGetValue(r,&rand);
      start = (PetscInt)(rand*m);
      PetscRandomGetValue(r,&rand);
      end   = (PetscInt)(rand*m);
      lsize =  end - start;
      if (start > end) { start = end; lsize = -lsize;}
      ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);
      ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);
    }
    ndpar = nd;
    PetscRandomDestroy(&r);
  }
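  /* Increase the overlap of the same subdomains on the parallel matrix A
     and on the sequential matrix B */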
  MatIncreaseOverlap(A,ndpar,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);
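  /* In the nested-dissection case is1 contains a single parallel IS; gather
     it into one sequential IS per process so it can be compared with is2 */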
  if (useND) {
    IS *is;

    ISAllGatherDisjoint(is1[0],&is);
    ISDestroy(&is1[0]);
    PetscFree(is1);
    is1 = is;
  }
  /* Now see if the serial and parallel case have the same answers */
  for (i=0; i<nd; ++i) {
    ISEqual(is1[i],is2[i],&flg);
    if (!flg) {
      ISViewFromOptions(is1[i],NULL,"-err_view");
      ISViewFromOptions(is2[i],NULL,"-err_view");
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"proc:[%d], i=%D, flg =%d\n",rank,i,(int)flg);
    }
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  ierr = PetscFinalize();
  return ierr;
}



/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 1
        args: -nd 7
      test:
        requires: parmetis
        suffix: 1_nd
        args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !define(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 2
        args: -nd 7
      test:
        requires: parmetis
        suffix: 2_nd
        args: -nested_dissection -mat_partitioning_type parmetis

TEST*/