Actual source code: ex33.c

static char help[] = "Test memory scalability of MatMatMult() for AIJ and DENSE matrices. \n\
Modified from the code contributed by Ian Lin <iancclin@umich.edu> \n\n";

/*
Example:
  mpiexec -n <np> ./ex33 -mem_view -matmatmult_Bbn <Bbn>
*/
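/*
  -mem_view makes the run below print its memory footprint after each stage; -matmatmult_Bbn is
  presumably the number of columns of the dense operand that MatMatMult() communicates per pass,
  trading extra passes for a lower peak memory footprint.
*/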

#include <petsc.h>

PetscErrorCode Print_memory(PetscLogDouble mem)
{
  double         max_mem,min_mem;

  MPI_Reduce(&mem, &max_mem, 1, MPI_DOUBLE, MPI_MAX, 0, MPI_COMM_WORLD);
  MPI_Reduce(&mem, &min_mem, 1, MPI_DOUBLE, MPI_MIN, 0, MPI_COMM_WORLD);
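  /* the caller passes a byte count (from PetscMemoryGetCurrentUsage()); convert it to megabytes */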
  max_mem = max_mem / 1024.0 / 1024.0;
  min_mem = min_mem / 1024.0 / 1024.0;
  PetscPrintf(MPI_COMM_WORLD, " max and min memory across all processors %.4f MB, %.4f MB.\n", (double)max_mem,(double)min_mem);
  return(0);
}

/*
   Illustrate how to use MPI derived data types.
   Doing so can save significant memory; see MatMPIDenseScatter().
*/
PetscErrorCode TestMPIDerivedDataType(void)
{
  MPI_Datatype      type1, type2, rtype1, rtype2;
  PetscInt          i, j;
  PetscScalar       buffer[24]; /* an array of 4 rows and 6 columns */
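  /* buffer is treated as a 4x6 matrix stored column-major, i.e. entry (i,j) is buffer[i+j*4],
     which is the layout PETSc uses for dense matrices */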
  MPI_Status        status;
  PetscMPIInt       rank, size, disp[2];

  MPI_Comm_size(MPI_COMM_WORLD, &size);
  if (size < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use at least 2 processors");
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  if (rank == 0) {
    /* proc[0] sends 2 rows to proc[1] */
    for (i=0; i<24; i++) buffer[i] = (PetscScalar)i;

    disp[0] = 0;  disp[1] = 2;
    MPI_Type_create_indexed_block(2, 1, (const PetscMPIInt *)disp, MPIU_SCALAR, &type1);
    /* one column has 4 entries */
    MPI_Type_create_resized(type1, 0, 4*sizeof(PetscScalar), &type2);
    MPI_Type_commit(&type2);
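    /* sending 6 items of type2 advances through the 6 columns one extent (4 scalars) at a time,
       picking entries 0 and 2 of each column, i.e. rows 0 and 2 of the 4x6 matrix */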
    MPI_Send(buffer, 6, type2, 1, 123, MPI_COMM_WORLD);

  } else if (rank == 1) {
    /* proc[1] receives the 2 rows from proc[0] and puts them into contiguous rows, starting at row 1 (disp[0]) */
    PetscInt blen = 2;
    for (i=0; i<24; i++) buffer[i] = 0.0;

    disp[0] = 1;
    MPI_Type_create_indexed_block(1, blen, (const PetscMPIInt *)disp, MPIU_SCALAR, &rtype1);
    MPI_Type_create_resized(rtype1, 0, 4*sizeof(PetscScalar), &rtype2);

    MPI_Type_commit(&rtype2);
    MPI_Recv(buffer, 6, rtype2, 0, 123, MPI_COMM_WORLD, &status);
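    /* rtype2 places the 12 received scalars into rows 1 and 2 of each column, so the printout
       below should show 0 4 8 12 16 20 in row 1, 2 6 10 14 18 22 in row 2, and zeros elsewhere */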
    for (i=0; i<4; i++) {
      for (j=0; j<6; j++) {
        PetscPrintf(MPI_COMM_SELF,"  %g", (double)PetscRealPart(buffer[i+j*4]));
      }
      PetscPrintf(MPI_COMM_SELF,"\n");
    }
  }

  if (rank == 0) {
    MPI_Type_free(&type1);
    MPI_Type_free(&type2);
  } else if (rank == 1) {
    MPI_Type_free(&rtype1);
    MPI_Type_free(&rtype2);
  }
  MPI_Barrier(MPI_COMM_WORLD);
  return(0);
}

int main(int argc, char **args)
{
  PetscInt          mA = 2700, nX = 80, nz = 40;
  /* PetscInt       mA = 6, nX = 5, nz = 2;   small test */
  PetscLogDouble    mem;
  Mat               A,X,Y;
  PetscErrorCode    ierr;
  PetscBool         flg = PETSC_FALSE;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetBool(NULL,NULL,"-test_mpiderivedtype",&flg,NULL);
  if (flg) {
    TestMPIDerivedDataType();
    ierr = PetscFinalize();
    return ierr;
  }

  PetscOptionsGetBool(NULL,NULL,"-mem_view",&flg,NULL);
  PetscMemoryGetCurrentUsage(&mem);
  if (flg) {
    PetscPrintf(MPI_COMM_WORLD, "Before start,");
    Print_memory(mem);
  }

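  /* A is a sparse mA x mA AIJ matrix; nz preallocates nz nonzeros per row in both the diagonal
     and off-diagonal blocks owned by each process */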
  MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,mA,mA,nz,NULL,nz,NULL,&A);
  MatSetRandom(A,NULL);
  PetscMemoryGetCurrentUsage(&mem);
  if (flg) {
    PetscPrintf(MPI_COMM_WORLD, "After creating A,");
    Print_memory(mem);
  }

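  /* X is a dense mA x nX matrix; passing NULL for the data array lets PETSc allocate the storage */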
  MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,mA,nX,NULL,&X);
  MatSetRandom(X,NULL);
  PetscMemoryGetCurrentUsage(&mem);
  if (flg) {
    PetscPrintf(MPI_COMM_WORLD, "After creating X,");
    Print_memory(mem);
  }

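  /* Y = A*X; MAT_INITIAL_MATRIX creates Y here, and PETSC_DEFAULT accepts the default fill estimate */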
  MatMatMult(A,X,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Y);
  PetscMemoryGetCurrentUsage(&mem);
  if (flg) {
    PetscPrintf(MPI_COMM_WORLD, "After MatMatMult,");
    Print_memory(mem);
  }

  /* Test reuse */
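  /* MAT_REUSE_MATRIX recomputes the product into the existing Y, so little or no additional
     memory should show up in the report below */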
  MatMatMult(A,X,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Y);
  PetscMemoryGetCurrentUsage(&mem);
  if (flg) {
    PetscPrintf(MPI_COMM_WORLD, "After reuse MatMatMult,");
    Print_memory(mem);
  }

  /* Check accuracy */
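  /* MatMatMultEqual() compares A*X against Y by applying both to 10 random vectors */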
  MatMatMultEqual(A,X,Y,10,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMETYPE,"Error in MatMatMult()");

  MatDestroy(&A);
  MatDestroy(&X);
  MatDestroy(&Y);

  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      suffix: 1
      nsize: 4
      output_file: output/ex33.out

   test:
      suffix: 2
      nsize: 8
      output_file: output/ex33.out

   test:
      suffix: 3
      nsize: 2
      args: -test_mpiderivedtype
      output_file: output/ex33_3.out

TEST*/