Actual source code: ex7.c

petsc-3.11.4 2019-09-28
static char help[] = "Test vecscatter of different block sizes across processes\n\n";

#include <petscvec.h>
int main(int argc,char **argv)
{
  PetscErrorCode     ierr;
  PetscInt           i,bs,n,low,high;
  PetscMPIInt        nproc,rank;
  Vec                x,y,z;
  IS                 ix,iy;
  VecScatter         vscat;
  const PetscScalar  *yv;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&nproc);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  if (nproc != 2) SETERRQ(PETSC_COMM_WORLD,1,"This test can only run on two MPI ranks");
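  /* The local sizes, block sizes and indices used below are hard-wired for exactly two ranks, hence the check above */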

  /* Create an MPI vector x of size 12 on two processes, and set x = {0, 1, 2, .., 11} */
  ierr = VecCreateMPI(PETSC_COMM_WORLD,6,PETSC_DECIDE,&x);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);
  for (i=low; i<high; i++) {ierr = VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  /* Create a seq vector y, and a parallel to sequential (PtoS) vecscatter to scatter x to y */
  if (!rank) {
    /* On rank 0, seq y is of size 6. We will scatter x[0,1,2,6,7,8] to y[0,1,2,3,4,5] using IS with bs=3 */
    PetscInt idx[2] = {0,2};
    PetscInt idy[2] = {0,1};
    n    = 6;
    bs   = 3;
    ierr = VecCreateSeq(PETSC_COMM_SELF,n,&y);CHKERRQ(ierr);
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,2,idx,PETSC_COPY_VALUES,&ix);CHKERRQ(ierr);
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,2,idy,PETSC_COPY_VALUES,&iy);CHKERRQ(ierr);
  } else {
    /* On rank 1, seq y is of size 4. We will scatter x[4,5,10,11] to y[0,1,2,3] using IS with bs=2 */
    PetscInt idx[2] = {2,5};
    PetscInt idy[2] = {0,1};
    n    = 4;
    bs   = 2;
    ierr = VecCreateSeq(PETSC_COMM_SELF,n,&y);CHKERRQ(ierr);
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,2,idx,PETSC_COPY_VALUES,&ix);CHKERRQ(ierr);
    ierr = ISCreateBlock(PETSC_COMM_SELF,bs,2,idy,PETSC_COPY_VALUES,&iy);CHKERRQ(ierr);
  }
  ierr = VecScatterCreate(x,ix,y,iy,&vscat);CHKERRQ(ierr);
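  /* ix addresses entries of the parallel vector x by global block indices, while iy addresses
     entries of the sequential vector y by its local block indices. The block size differs
     between the two ranks (3 on rank 0, 2 on rank 1), but on each rank ix and iy select the
     same number of entries, as VecScatterCreate requires. */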

  /* Do the vecscatter */
  ierr = VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
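  /* The Begin/End split allows unrelated computation to be overlapped with the communication;
     here the two calls are simply made back to back. */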

  /* Print y. Since y is sequential, we put y in a parallel z to print its value on both ranks */
  ierr = VecGetArrayRead(y,&yv);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,yv,&z);CHKERRQ(ierr);
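  /* VecCreateMPIWithArray with bs=1 wraps yv without copying it, so z is just a parallel view
     of y's local entries. Given the scatters above, the view should show 0 1 2 6 7 8 from
     rank 0 followed by 4 5 10 11 from rank 1. */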
  ierr = VecView(z,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(y,&yv);CHKERRQ(ierr);

  ierr = ISDestroy(&ix);CHKERRQ(ierr);
  ierr = ISDestroy(&iy);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = VecDestroy(&z);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&vscat);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 2
      args:
      requires:
TEST*/