/*
   Actual source code: ex1.c
   petsc-3.13.6 2020-09-29
   Report Typos and Errors
*/

  2: static char help[]= "Scatters between parallel vectors of type VECNODE. \n\
  3: uses block index sets\n\n";

  5:  #include <petscvec.h>

  7: int main(int argc,char **argv)
  8: {
 10:   PetscInt       bs=1,n=5,N,i,low;
 11:   PetscInt       ix0[3] = {5,7,9},iy0[3] = {1,2,4},ix1[3] = {2,3,1},iy1[3] = {0,3,9};
 12:   PetscMPIInt    size,rank;
 13:   PetscScalar    *array;
 14:   Vec            x,x1,y;
 15:   IS             isx,isy;
 16:   VecScatter     ctx;
 17:   VecScatterType type;
 18:   PetscBool      flg;

 20:   PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
 21:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
 22:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

 24:   if (size <2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run more than one processor");

 26:   PetscOptionsGetInt(NULL,NULL,"-bs",&bs,NULL);
 27:   n    = bs*n;

 29:   /* Create vector x over shared memory */
 30:   VecCreate(PETSC_COMM_WORLD,&x);
 31:   VecSetSizes(x,n,PETSC_DECIDE);
 32:   VecSetType(x,VECNODE);
 33:   VecSetFromOptions(x);

 35:   VecGetOwnershipRange(x,&low,NULL);
 36:   VecGetArray(x,&array);
 37:   for (i=0; i<n; i++) {
 38:     array[i] = (PetscScalar)(i + low);
 39:   }
 40:   VecRestoreArray(x,&array);
 41:   /* VecView(x,PETSC_VIEWER_STDOUT_WORLD); */

 43:   /* Test some vector functions */
 44:   VecAssemblyBegin(x);
 45:   VecAssemblyEnd(x);

 47:   VecGetSize(x,&N);
 48:   VecGetLocalSize(x,&n);

 50:   VecDuplicate(x,&x1);
 51:   VecCopy(x,x1);
 52:   VecEqual(x,x1,&flg);
 53:   if (!flg) SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_ARG_WRONG,"x1 != x");

 55:   VecScale(x1,2.0);
 56:   VecSet(x1,10.0);
 57:   /* VecView(x1,PETSC_VIEWER_STDOUT_WORLD); */

 59:   /* Create vector y over shared memory */
 60:   VecCreate(PETSC_COMM_WORLD,&y);
 61:   VecSetSizes(y,n,PETSC_DECIDE);
 62:   VecSetType(y,VECNODE);
 63:   VecSetFromOptions(y);
 64:   VecGetArray(y,&array);
 65:   for (i=0; i<n; i++) {
 66:     array[i] = -(PetscScalar) (i + 100*rank);
 67:   }
 68:   VecRestoreArray(y,&array);
 69:   VecAssemblyBegin(y);
 70:   VecAssemblyEnd(y);
 71:   /* VecView(y,PETSC_VIEWER_STDOUT_WORLD); */

 73:   /* Create two index sets */
 74:   if (!rank) {
 75:     ISCreateBlock(PETSC_COMM_SELF,bs,3,ix0,PETSC_COPY_VALUES,&isx);
 76:     ISCreateBlock(PETSC_COMM_SELF,bs,3,iy0,PETSC_COPY_VALUES,&isy);
 77:   } else {
 78:     ISCreateBlock(PETSC_COMM_SELF,bs,3,ix1,PETSC_COPY_VALUES,&isx);
 79:     ISCreateBlock(PETSC_COMM_SELF,bs,3,iy1,PETSC_COPY_VALUES,&isy);
 80:   }

 82:   if (rank == 10) {
 83:     PetscPrintf(PETSC_COMM_SELF,"\n[%d] isx:\n",rank);
 84:     ISView(isx,PETSC_VIEWER_STDOUT_SELF);
 85:     PetscPrintf(PETSC_COMM_SELF,"\n[%d] isy:\n",rank);
 86:     ISView(isy,PETSC_VIEWER_STDOUT_SELF);
 87:   }

 89:   /* Create Vector scatter */
 90:   VecScatterCreate(x,isx,y,isy,&ctx);
 91:   VecScatterSetFromOptions(ctx);
 92:   VecScatterGetType(ctx,&type);
 93:   PetscPrintf(PETSC_COMM_WORLD,"scatter type %s\n",type);

 95:   /* Test forward vecscatter */
 96:   VecSet(y,0.0);
 97:   VecScatterBegin(ctx,x,y,ADD_VALUES,SCATTER_FORWARD);
 98:   VecScatterEnd(ctx,x,y,ADD_VALUES,SCATTER_FORWARD);
 99:   PetscPrintf(PETSC_COMM_WORLD,"\nSCATTER_FORWARD y:\n");
100:   VecView(y,PETSC_VIEWER_STDOUT_WORLD);

102:   /* Test reverse vecscatter */
103:   VecSet(x,0.0);
104:   VecSet(y,0.0);
105:   VecGetOwnershipRange(y,&low,NULL);
106:   VecGetArray(y,&array);
107:   for (i=0; i<n; i++) {
108:     array[i] = (PetscScalar)(i+ low);
109:   }
110:   VecRestoreArray(y,&array);
111:   VecScatterBegin(ctx,y,x,ADD_VALUES,SCATTER_REVERSE);
112:   VecScatterEnd(ctx,y,x,ADD_VALUES,SCATTER_REVERSE);
113:   PetscPrintf(PETSC_COMM_WORLD,"\nSCATTER_REVERSE x:\n");
114:   VecView(x,PETSC_VIEWER_STDOUT_WORLD);

116:   /* Free objects */
117:   VecScatterDestroy(&ctx);
118:   ISDestroy(&isx);
119:   ISDestroy(&isy);
120:   VecDestroy(&x);
121:   VecDestroy(&x1);
122:   VecDestroy(&y);
123:   PetscFinalize();
124:   return ierr;
125: }

/*TEST

   test:
      nsize: 2
      args: -vecscatter_type mpi3node
      output_file: output/ex1_1.out
      requires:  define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

   test:
      suffix: 2
      nsize: 4
      args: -vecscatter_type mpi3node
      output_file: output/ex1_2.out
      requires:  define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

   test:
      suffix: 3
      nsize: 2
      args: -bs 2 -vecscatter_type mpi3node
      output_file: output/ex1_3.out
      requires:  define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)

TEST*/