Actual source code: ex211.c

petsc-3.10.5 2019-03-28

static char help[] = "Tests MatCreateSubMatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

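/*
   Splits isrow/iscol, which are distributed the same way as mat, into sequential pieces:
   isrow_d and iscol_d hold the locally owned rows/columns in local numbering, iscol_o holds
   the selected off-process columns numbered into the off-diagonal block B, and garray[]
   gives the global submatrix column index of each iscol_o entry (the caller frees garray).
*/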
PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat,IS isrow,IS iscol,IS *isrow_d,IS *iscol_d,IS *iscol_o,const PetscInt *garray[])
{
  Vec            x,cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray,*cmaparray;
  PetscInt       ncols,isstart,*idx,m,rstart,count;
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)mat->data;
  Mat            B=a->B;
  Vec            lvec=a->lvec,lcmap;
  PetscInt       i,cstart,cend,Bn=B->cmap->N;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  VecScatter     Mvctx;

  PetscObjectGetComm((PetscObject)mat,&comm);
  MPI_Comm_rank(comm,&rank);
  ISGetLocalSize(iscol,&ncols);

  /* (1) iscol selects a subset of mat's columns; pad with -1 to form the full-length vectors x and cmap */
  MatCreateVecs(mat,&x,NULL);
  VecDuplicate(x,&cmap);
  VecSet(x,-1.0);
  VecSet(cmap,-1.0);

  VecDuplicate(lvec,&lcmap);

  /* Get the global starting offset of this process's part of iscol and the column ownership range of mat */
  MPI_Scan(&ncols,&isstart,1,MPIU_INT,MPI_SUM,comm);
  isstart -= ncols;
  MatGetOwnershipRangeColumn(mat,&cstart,&cend);

  ISGetIndices(iscol,&is_idx);
  VecGetArray(x,&xarray);
  VecGetArray(cmap,&cmaparray);
  PetscMalloc1(ncols,&idx);
  for (i=0; i<ncols; i++) {
    xarray[is_idx[i]-cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i]-cstart] = (PetscScalar)(i + isstart); /* global index of iscol[i] in the submatrix */
    idx[i]                      = is_idx[i]-cstart;           /* local index of iscol[i] */
  }
  VecRestoreArray(x,&xarray);
  VecRestoreArray(cmap,&cmaparray);
  ISRestoreIndices(iscol,&is_idx);

  /* Get iscol_d: the locally owned part of iscol, in local column numbering */
  ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,iscol_d);
  ISGetBlockSize(iscol,&i);
  ISSetBlockSize(*iscol_d,i);

  /* Get isrow_d: the locally owned part of isrow, in local row numbering */
  ISGetLocalSize(isrow,&m);
  rstart = mat->rmap->rstart;
  PetscMalloc1(m,&idx);
  ISGetIndices(isrow,&is_idx);
  for (i=0; i<m; i++) idx[i] = is_idx[i]-rstart;
  ISRestoreIndices(isrow,&is_idx);

  ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,isrow_d);
  ISGetBlockSize(isrow,&i);
  ISSetBlockSize(*isrow_d,i);

  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes random 'count' in o-build? See src/mat/examples/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    MatSetUpMultiply_MPIAIJ(mat);
  }
  Mvctx = a->Mvctx_mpi1;
#endif
  Mvctx = a->Mvctx;
  VecScatterBegin(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD);

  VecScatterBegin(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD);

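  /* After the scatters, lvec[i] >= 0 exactly for the off-process columns of B selected by iscol,
     and lcmap[i] then holds that column's global index in the submatrix */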
  /* (3) Create the sequential iscol_o (a subset of iscol) and garray */
  /* off-process column indices */
  count = 0;
  PetscInt *cmap1;
  PetscMalloc1(Bn,&idx);
  PetscMalloc1(Bn,&cmap1);

  VecGetArray(lvec,&xarray);
  VecGetArray(lcmap,&cmaparray);
  for (i=0; i<Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                                      /* local column index in the off-diagonal part B */
      cmap1[count] = (PetscInt)PetscRealPart(cmaparray[i]);  /* column index in the submatrix */
      count++;
    }
  }
  printf("[%d] Bn %d, count %d\n",rank,(int)Bn,(int)count);
  VecRestoreArray(lvec,&xarray);
  VecRestoreArray(lcmap,&cmaparray);
  if (count != 6) { /* this test expects exactly 6 off-process columns */
    printf("[%d] count %d != 6 lvec:\n",rank,(int)count);
    VecView(lvec,0);

    printf("[%d] count %d != 6 lcmap:\n",rank,(int)count);
    VecView(lcmap,0);
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"count %D != 6",count);
  }

  ISCreateGeneral(PETSC_COMM_SELF,count,idx,PETSC_COPY_VALUES,iscol_o);
  /* cannot ensure iscol_o has same blocksize as iscol! */

  PetscFree(idx);

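  /* Return cmap1 to the caller as garray; the caller is responsible for freeing it with PetscFree() */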
  *garray = cmap1;

  VecDestroy(&x);
  VecDestroy(&cmap);
  VecDestroy(&lcmap);
  return 0;
}

int main(int argc,char **args)
{
  Mat            C,A;
  PetscInt       i,j,m = 3,n = 2,rstart,rend;
  PetscMPIInt    size,rank;
  PetscErrorCode ierr;
  PetscScalar    v;
  IS             isrow,iscol;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n    = 2*size;

  MatCreate(PETSC_COMM_WORLD,&C);
  MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(C);
  MatSetUp(C);

  /*
     This is JUST to generate a nice test matrix: each process fills all columns
     of its locally owned rows. This is not something one would ever do in practice.
  */
  MatGetOwnershipRange(C,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    for (j=0; j<m*n; j++) {
      v    = i + j + 1;
      MatSetValues(C,1,&i,1,&j,&v,INSERT_VALUES);
    }
  }

  MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  MatGetOwnershipRange(C,&rstart,&rend);
  /* Create a parallel IS with the rows we want on THIS process */
  ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&isrow);
  /* Create a parallel IS with the columns we want on THIS process (same as the rows for now) */
  ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&iscol);

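  /* Exercise the splitting routine defined above; its output index sets and garray are checked
     inside the routine and then freed without further use */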
  IS             iscol_d,isrow_d,iscol_o;
  const PetscInt *garray;
  ISGetSeqIS_SameColDist_Private(C,isrow,iscol,&isrow_d,&iscol_d,&iscol_o,&garray);

  ISDestroy(&isrow_d);
  ISDestroy(&iscol_d);
  ISDestroy(&iscol_o);
  PetscFree(garray);

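  /* Create the submatrix once with MAT_INITIAL_MATRIX, then fill it again in place with MAT_REUSE_MATRIX */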
  MatCreateSubMatrix(C,isrow,iscol,MAT_INITIAL_MATRIX,&A);
  MatCreateSubMatrix(C,isrow,iscol,MAT_REUSE_MATRIX,&A);

  ISDestroy(&isrow);
  ISDestroy(&iscol);
  MatDestroy(&A);
  MatDestroy(&C);
  ierr = PetscFinalize();
  return ierr;
}