Actual source code: ex6.c

petsc-3.14.6 2021-03-30
static char help[] = "  Test VecScatter with x, y on different communicators\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscErrorCode     ierr;
  PetscInt           i,n=5,rstart;
  PetscScalar        *val;
  const PetscScalar  *dat;
  Vec                x,y1,y2;
  MPI_Comm           newcomm;
  PetscMPIInt        nproc,rank;
  IS                 ix;
  VecScatter         vscat1,vscat2;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  if (nproc != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This test must run with exactly two MPI ranks\n");

  /* Create MPI vectors x and y1 on the same communicator (i.e., MPI_IDENT) */
  VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x);
  VecDuplicate(x,&y1);
  VecGetOwnershipRange(x,&rstart,NULL);
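  /* With n=5 entries locally on each of the two ranks, PETSc assigns contiguous
     ownership ranges, so rstart is 0 on rank 0 and 5 on rank 1 */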

  /* Fill x's local part; globally x = {0., 1., 2., ..., 9.} */
  VecGetArray(x,&val);
  for (i=0; i<n; i++) val[i] = rstart + i;
  VecRestoreArray(x,&val);

  /* Create index set ix; each rank contributes the stride {rstart, ..., rstart+n-1}, so globally ix = {0, 1, 2, ..., 9} */
  ISCreateStride(PETSC_COMM_WORLD,n,rstart,1,&ix);

  /* Create newcomm that reverses the processes in x's comm, then create y2 on it */
  MPI_Comm_split(PETSC_COMM_WORLD,0/*color*/,-rank/*key*/,&newcomm); /* keys -rank sort in decreasing rank order, reversing the processes in newcomm */
  VecCreateMPI(newcomm,n,PETSC_DECIDE,&y2);
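  /* Consequence of the reversal: world rank 1 is rank 0 of newcomm and therefore
     owns y2's global indices 0..4, while world rank 0 owns y2's indices 5..9 */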

  /* vscat1 and vscat2 look the same, but they are not: y2 lives on a different communicator than x */
  VecScatterCreate(x,ix,y1,ix,&vscat1);
  VecScatterCreate(x,ix,y2,ix,&vscat2);
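  /* VecScatterCreate matches entries by global index. For vscat1 the copy is purely
     local on each rank, but for vscat2 the entries x owns on world rank 0 (indices
     0..4) belong to world rank 1 in y2's layout, so the scatter moves data between
     the two processes */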

  VecScatterBegin(vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterBegin(vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (vscat1,x,y1,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (vscat2,x,y2,INSERT_VALUES,SCATTER_FORWARD);

  /* View on rank 0 of x's comm, which is PETSC_COMM_WORLD */
  if (!rank) {
    /* Print the part of x on rank 0, which is 0 1 2 3 4 */
    PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, x  = ");
    VecGetArrayRead(x,&dat);
    for (i=0; i<n; i++) {PetscPrintf(PETSC_COMM_SELF," %.0f",PetscRealPart(dat[i]));}
    VecRestoreArrayRead(x,&dat);

    /* Print the part of y1 on rank 0, which is 0 1 2 3 4 */
    PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y1 = ");
    VecGetArrayRead(y1,&dat);
    for (i=0; i<n; i++) {PetscPrintf(PETSC_COMM_SELF," %.0f",PetscRealPart(dat[i]));}
    VecRestoreArrayRead(y1,&dat);

    /* Print the part of y2 on rank 0, which is 5 6 7 8 9 since y2 reversed the processes of PETSC_COMM_WORLD */
    PetscPrintf(PETSC_COMM_SELF,"\nOn rank 0 of PETSC_COMM_WORLD, y2 = ");
    VecGetArrayRead(y2,&dat);
    for (i=0; i<n; i++) {PetscPrintf(PETSC_COMM_SELF," %.0f",PetscRealPart(dat[i]));}
    VecRestoreArrayRead(y2,&dat);
    PetscPrintf(PETSC_COMM_SELF,"\n");
  }

  ISDestroy(&ix);
  VecDestroy(&x);
  VecDestroy(&y1);
  VecDestroy(&y2);
  VecScatterDestroy(&vscat1);
  VecScatterDestroy(&vscat2);
  MPI_Comm_free(&newcomm);
  ierr = PetscFinalize();
  return ierr;
}
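/* Example run, assuming the binary is built as ex6; the expected output follows
   from the comments above:

     $ mpiexec -n 2 ./ex6

     On rank 0 of PETSC_COMM_WORLD, x  =  0 1 2 3 4
     On rank 0 of PETSC_COMM_WORLD, y1 =  0 1 2 3 4
     On rank 0 of PETSC_COMM_WORLD, y2 =  5 6 7 8 9
*/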

/*TEST

   test:
      nsize: 2
      requires: double
TEST*/