Actual source code: ex4.c

petsc-3.14.6 2021-03-30

static char help[]= "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,low,high,n=256,N,errors,tot_errors;
  PetscInt           bs=1,ix[2],iy[2];
  PetscMPIInt        nproc,rank;
  PetscScalar       *xval;
  const PetscScalar *yval;
  Vec                x,y;
  IS                 isx,isy;
  VecScatter         ctx;
  const PetscInt     niter = 10;
#if defined(PETSC_USE_LOG)
  PetscLogStage      stage1,stage2;
  PetscLogEvent      event1,event2;
  PetscLogDouble     numMessages,messageLength;
  PetscEventPerfInfo eventInfo;
  PetscInt           tot_msg,tot_len,avg_len;
#endif

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscLogDefaultBegin();
  MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  PetscLogStageRegister("Scatter(bs=1)", &stage1);
  PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1);
  PetscLogStageRegister("Scatter(bs=4)", &stage2);
  PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2);
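  /* Each block-size test below runs in its own log stage and event, so the message counts
     and lengths can be read back per block size with PetscLogEventGetPerfInfo() and then
     reduced over all ranks. */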

  /* Create a parallel vector x and a sequential vector y */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,n,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetOwnershipRange(x,&low,&high);
  VecGetSize(x,&N);
  VecCreateSeq(PETSC_COMM_SELF,n,&y);
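  /* Note: y has length n on every rank, but the scatters below only write its first few
     entries (2 entries for bs=1, 2*bs entries for bs=4); the rest of y is left untouched. */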

  /*=======================================
     test VecScatter with bs = 1
    ======================================*/

  /* the code works as if we are going to do 3-point stencil computations on a 1D domain x,
     which has periodic boundary conditions but the two ghosts are scattered to the beginning of y.
   */
  bs    = 1;
  ix[0] = rank? low-1 : N-1; /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc-1)? high : 0;
  iy[0] = 0;
  iy[1] = 1;
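  /* Illustrative values, assuming the default n=256 on the 4 ranks used by the test below,
     so that low = 256*rank and N = 1024:
       rank 0: ix = {1023, 256}   (left ghost wraps around periodically)
       rank 1: ix = {255,  512}
       rank 3: ix = {767,  0}     (right ghost wraps around periodically)
     iy = {0, 1} on every rank, i.e. both ghosts land at the start of y. */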

  ISCreateGeneral(PETSC_COMM_SELF,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateGeneral(PETSC_COMM_SELF,2,iy,PETSC_COPY_VALUES,&isy);
  VecScatterCreate(x,isx,y,isy,&ctx);

  PetscLogStagePush(stage1);
  PetscLogEventBegin(event1,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghosts to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0]+i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1]+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event1,0,0,0,0);
  PetscLogStagePop();

  /* check if we found wrong values on any processors */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) { PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %D\n",bs); }

  /* print out event log of VecScatter(bs=1) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage1,event1,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages*0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength*0.5;
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
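  /* For example, assuming the 4-rank test below, niter = 10 and 8-byte real PetscScalars,
     the formula above gives tot_msg = 2*4*10 = 80, tot_len = 80*8*1 = 640 bytes and
     avg_len = 8 bytes. */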
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%D) has sent out %D messages, total %D bytes, with average length %D bytes\n",bs,tot_msg,tot_len,avg_len);
#endif

  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);

  /*=======================================
     test VecScatter with bs = 4
    ======================================*/

  /* similar to the 3-point stencil above, except that this time a ghost is a block */
  bs    = 4; /* n needs to be a multiple of bs to make the following code work */
  ix[0] = rank? low/bs-1 : N/bs-1; /* ix[] contains global indices of the two ghost blocks */
  ix[1] = (rank != nproc-1)? high/bs : 0;
  iy[0] = 0;
  iy[1] = 1;
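  /* Reminder: for ISCreateBlock a block index b stands for the bs consecutive entries
     b*bs, ..., b*bs+bs-1, so ix[] and iy[] here are block indices, not entry indices.
     E.g. on rank 0 with N = 1024 and bs = 4, ix = {255, 64}, which selects entries
     1020..1023 and 256..259 of x. */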

  ISCreateBlock(PETSC_COMM_SELF,bs,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateBlock(PETSC_COMM_SELF,bs,2,iy,PETSC_COPY_VALUES,&isy);

  VecScatterCreate(x,isx,y,isy,&ctx);

  PetscLogStagePush(stage2);
  PetscLogEventBegin(event2,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghost blocks to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0])  != ix[0]*bs+i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1]*bs+i) errors++;
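    /* After the scatter, y[0..bs-1] holds the first ghost block and y[bs..2*bs-1] the second;
       since x is a linear ramp, only the leading entry of each block is spot-checked here. */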
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event2,0,0,0,0);
  PetscLogStagePop();

  /* check if we found wrong values on any processors */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) { PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %D\n",bs); }

  /* print out event log of VecScatter(bs=4) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage2,event2,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages*0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength*0.5;
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
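  /* For example, the same 4-rank, niter = 10 run with 8-byte real PetscScalars gives
     tot_msg = 80 again, but tot_len = 80*8*4 = 2560 bytes and avg_len = 32 bytes, i.e. the
     same number of messages, each 4x longer than with bs = 1. */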
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%D) has sent out %D messages, total %D bytes, with average length %D bytes\n",bs,tot_msg,tot_len,avg_len);
#endif

  PetscPrintf(PETSC_COMM_WORLD,"Program finished\n");
  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);
  VecDestroy(&x);
  VecDestroy(&y);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 4
      args:
      requires: double define(PETSC_USE_LOG)

   test:
      suffix: 2
      nsize: 4
      args: -vecscatter_type mpi3
      # use this filter since some messages might go through shared memory and PETSc has not yet
      # implemented message logging for them. This test mainly checks that the mpi3 VecScatter type works.
      filter: grep -v "VecScatter(bs="
      requires: double define(PETSC_USE_LOG) define(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
TEST*/