Actual source code: ex4.c

petsc-3.13.6 2020-09-29

static char help[]= "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,low,high,n=256,N,errors,tot_errors,tot_msg,tot_len,avg_len;
  PetscInt           bs=1,ix[2],iy[2];
  PetscMPIInt        nproc,rank;
  PetscScalar       *xval;
  const PetscScalar *yval;
  Vec                x,y;
  IS                 isx,isy;
  VecScatter         ctx;
  const PetscInt     niter = 10;
  PetscLogStage      stage1,stage2;
  PetscLogEvent      event1,event2;
  PetscLogDouble     numMessages,messageLength;
  PetscEventPerfInfo eventInfo;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscLogDefaultBegin();
  MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  PetscLogStageRegister("Scatter(bs=1)", &stage1);
  PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1);
  PetscLogStageRegister("Scatter(bs=4)", &stage2);
  PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2);
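
  /* A separate log stage and event is registered for each block size, so that the message
     counts gathered below (and the -log_view summary) can be compared per block size. */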

  /* Create a parallel vector x and a sequential vector y */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,n,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetOwnershipRange(x,&low,&high);
  VecGetSize(x,&N);
  VecCreateSeq(PETSC_COMM_SELF,n,&y);
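
  /* Note: y has n entries on every rank, but only its first few entries (two entries for
     bs=1, two blocks of bs entries for bs=4) actually receive ghost values below. */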

  /*=======================================
     test VecScatter with bs = 1
    ======================================*/

  /* the code works as if we are going to do 3-point stencil computations on a 1D domain x,
     which has periodic boundary conditions but the two ghosts are scattered to the beginning of y.
   */
  bs    = 1;
  ix[0] = rank? low-1 : N-1; /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc-1)? high : 0;
  iy[0] = 0;
  iy[1] = 1;
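
  /* For illustration, with the default n=256 on 4 ranks (N=1024): rank 0 owns [0,256) and
     its ghosts are {1023,256}; rank 1 owns [256,512) and its ghosts are {255,512}; the last
     rank owns [768,1024) and its ghosts are {767,0}, the periodic wrap-around. */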

  ISCreateGeneral(PETSC_COMM_SELF,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateGeneral(PETSC_COMM_SELF,2,iy,PETSC_COPY_VALUES,&isy);
  VecScatterCreate(x,isx,y,isy,&ctx);
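
  /* The scatter context maps the entries of x listed in isx to the entries of y listed in
     isy; it is built once here and reused for every iteration of the loop below. */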

  PetscLogStagePush(stage1);
  PetscLogEventBegin(event1,0,0,0,0);
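  /* everything between PetscLogEventBegin/End is attributed to event1 within stage1, so the
     MPI traffic issued by the scatters in this loop shows up in eventInfo below */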
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghosts to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0]+i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1]+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event1,0,0,0,0);
  PetscLogStagePop();

  /* check if we found wrong values on any process */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) { PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %d\n",bs); }

  /* print out event log of VecScatter(bs=1) */
  PetscLogEventGetPerfInfo(stage1,event1,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
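  /* eventInfo counts the MPI sends and receives (and their bytes) logged on each rank during
     the event; summing over all ranks therefore counts every message twice, once at the
     sender and once at the receiver, hence the factor 0.5 below */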
  tot_msg = (PetscInt)numMessages*0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength*0.5;
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%d) has sent out %d messages, total %d bytes, with average length %d bytes\n",bs,tot_msg,tot_len,avg_len);
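
  /* For illustration, per the comment above: with 4 ranks and niter=10 this is 2*4*10 = 80
     messages and, assuming an 8-byte PetscScalar (the test requires double precision),
     80*8*1 = 640 bytes in total, i.e. an average length of 8 bytes for bs=1. */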

  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);

  /*=======================================
     test VecScatter with bs = 4
    ======================================*/

  /* similar to the 3-point stencil above, except that this time a ghost is a block */
  bs    = 4; /* n needs to be a multiple of bs to make the following code work */
  ix[0] = rank? low/bs-1 : N/bs-1; /* ix[] contains global indices of the two ghost blocks */
  ix[1] = (rank != nproc-1)? high/bs : 0;
  iy[0] = 0;
  iy[1] = 1;

  ISCreateBlock(PETSC_COMM_SELF,bs,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateBlock(PETSC_COMM_SELF,bs,2,iy,PETSC_COPY_VALUES,&isy);

  VecScatterCreate(x,isx,y,isy,&ctx);
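
  /* With a block IS, each index stands for a block of bs consecutive entries: block b covers
     entries b*bs .. b*bs+bs-1. So this scatter moves two blocks of 4 scalars into y[0..7]. */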

  PetscLogStagePush(stage2);
  PetscLogEventBegin(event2,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghost blocks to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
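    /* the first ghost block lands in y[0..bs-1] and the second in y[bs..2*bs-1];
       only the first entry of each block is checked here */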
    if ((PetscInt)PetscRealPart(yval[0])  != ix[0]*bs+i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1]*bs+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event2,0,0,0,0);
  PetscLogStagePop();

  /* check if we found wrong values on any process */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) { PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %d\n",bs); }

  /* print out event log of VecScatter(bs=4) */
  PetscLogEventGetPerfInfo(stage2,event2,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages*0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength*0.5;
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%d) has sent out %d messages, total %d bytes, with average length %d bytes\n",bs,tot_msg,tot_len,avg_len);
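
  /* For illustration, with 4 ranks and niter=10 the message count is the same 80 as for
     bs=1, but each message now carries a whole block, so (assuming an 8-byte PetscScalar)
     the total is 80*8*4 = 2560 bytes with an average length of 32 bytes. */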

  PetscPrintf(PETSC_COMM_WORLD,"Program finished\n");
  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);
  VecDestroy(&x);
  VecDestroy(&y);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 4
      args:
      requires: double define(PETSC_USE_LOG)

   test:
      suffix: 2
      nsize: 4
      args: -vecscatter_type mpi3
      # use this filter since some messages might go through shared memory and PETSc has not yet
      # implemented message logging for them; this test only checks that the mpi3 VecScatter type works
      filter: grep -v "VecScatter(bs="
      requires: double define(PETSC_USE_LOG) define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
TEST*/
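
/* Example usage (illustrative, not part of the original source): build ex4 with the usual
   PETSc test makefile and run, e.g.,
       mpiexec -n 4 ./ex4 -log_view
   The -log_view summary lists the "Scatter(bs=1)" and "Scatter(bs=4)" stages registered above,
   which can be compared against the message counts printed by the program itself. */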