Actual source code: ex9.c

petsc-3.14.6 2021-03-30

static char help[] = "Tests MPI parallel matrix creation. Test MatCreateRedundantMatrix() \n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat            C,Credundant;
  MatInfo        info;
  PetscMPIInt    rank,size,subsize;
  PetscInt       i,j,m = 3,n = 2,low,high,iglobal;
  PetscInt       Ii,J,ldim,nsubcomms;
  PetscErrorCode ierr;
  PetscBool      flg_info,flg_mat;
  PetscScalar    v,one = 1.0;
  Vec            x,y;
  MPI_Comm       subcomm;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n    = 2*size;

  MatCreate(PETSC_COMM_WORLD,&C);
  MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(C);
  MatSetUp(C);

  /* Create the matrix for the five point stencil, YET AGAIN */
  for (i=0; i<m; i++) {
    for (j=2*rank; j<2*rank+2; j++) {
      v = -1.0;  Ii = j + n*i;
      if (i>0)   {J = Ii - n; MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES);}
      if (i<m-1) {J = Ii + n; MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES);}
      if (j>0)   {J = Ii - 1; MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES);}
      if (j<n-1) {J = Ii + 1; MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES);}
      v = 4.0; MatSetValues(C,1,&Ii,1,&Ii,&v,INSERT_VALUES);
    }
  }

  /* Add extra elements (to illustrate variants of MatGetInfo) */
  Ii   = n; J = n-2; v = 100.0;
  MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES);
  Ii   = n-2; J = n; v = 100.0;
  MatSetValues(C,1,&Ii,1,&J,&v,INSERT_VALUES);

  MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

  /* Form vectors */
  MatCreateVecs(C,&x,&y);
  VecGetLocalSize(x,&ldim);
  VecGetOwnershipRange(x,&low,&high);
  for (i=0; i<ldim; i++) {
    iglobal = i + low;
    v       = one*((PetscReal)i) + 100.0*rank;
    VecSetValues(x,1,&iglobal,&v,INSERT_VALUES);
  }
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);

  MatMult(C,x,y);

  PetscOptionsHasName(NULL,NULL,"-view_info",&flg_info);
  if (flg_info) {
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO);
    MatView(C,PETSC_VIEWER_STDOUT_WORLD);

    MatGetInfo(C,MAT_GLOBAL_SUM,&info);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global sums):\nnonzeros = %D, allocated nonzeros = %D\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated);
    MatGetInfo(C,MAT_GLOBAL_MAX,&info);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"matrix information (global max):\nnonzeros = %D, allocated nonzeros = %D\n",(PetscInt)info.nz_used,(PetscInt)info.nz_allocated);
  }

  PetscOptionsHasName(NULL,NULL,"-view_mat",&flg_mat);
  if (flg_mat) {
    MatView(C,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Test MatCreateRedundantMatrix() */
  nsubcomms = size;
  PetscOptionsGetInt(NULL,NULL,"-nsubcomms",&nsubcomms,NULL);
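  /* Create nsubcomms redundant copies of C, each living on a subcommunicator of
     PETSC_COMM_WORLD; passing MPI_COMM_NULL lets PETSc create the subcommunicators
     itself.  The second call reuses the matrix created by the first (MAT_REUSE_MATRIX). */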
  MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_INITIAL_MATRIX,&Credundant);
  MatCreateRedundantMatrix(C,nsubcomms,MPI_COMM_NULL,MAT_REUSE_MATRIX,&Credundant);

  PetscObjectGetComm((PetscObject)Credundant,&subcomm);
  MPI_Comm_size(subcomm,&subsize);

  if (subsize==2 && flg_mat) {
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(subcomm),"\n[%d] Credundant:\n",rank);
    MatView(Credundant,PETSC_VIEWER_STDOUT_(subcomm));
  }
  MatDestroy(&Credundant);

  /* Test MatCreateRedundantMatrix() with user-provided subcomm */
  {
    PetscSubcomm psubcomm;

    PetscSubcommCreate(PETSC_COMM_WORLD,&psubcomm);
    PetscSubcommSetNumber(psubcomm,nsubcomms);
    PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
    /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
    PetscSubcommSetFromOptions(psubcomm);

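    /* PetscSubcommChild() returns this rank's child communicator of psubcomm,
       which is handed to MatCreateRedundantMatrix() as the user-provided subcomm */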
    MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_INITIAL_MATRIX,&Credundant);
    MatCreateRedundantMatrix(C,nsubcomms,PetscSubcommChild(psubcomm),MAT_REUSE_MATRIX,&Credundant);

    PetscSubcommDestroy(&psubcomm);
    MatDestroy(&Credundant);
  }

  VecDestroy(&x);
  VecDestroy(&y);
  MatDestroy(&C);
  ierr = PetscFinalize();
  return ierr;
}


/*TEST

   test:
      nsize: 3
      args: -view_info

   test:
      suffix: 2
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 3
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type contiguous

   test:
      suffix: 3_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat

   test:
      suffix: 3_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat

   test:
      suffix: 3_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat

   test:
      suffix: 4_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat -psubcomm_type interlaced

TEST*/
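
/* A minimal run sketch, assuming the example has been built into an executable named
   ./ex9 (the build step is not shown on this page).  The suffix-2 test above then
   corresponds to:

       mpiexec -n 3 ./ex9 -nsubcomms 2 -view_mat -psubcomm_type interlaced
*/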