Actual source code: ex18.c

petsc-3.10.5 2019-03-28
static char help[] = "Tests the use of MatZeroRowsColumns() for parallel matrices.\n\
Contributed-by: Stephan Kramer <s.kramer@imperial.ac.uk>\n\n";

#include <petscmat.h>

int main(int argc,char **args)
{
  Mat            A;
  Vec            x, rhs, y;
  PetscInt       i,j,k,b,m = 3,n,nlocal=2,bs=1,Ii,J;
  PetscInt       *boundary_nodes, nboundary_nodes, *boundary_indices;
  PetscMPIInt    rank,size;
  PetscErrorCode ierr;
  PetscScalar    v,v0,v1,v2,a0=0.1,a,rhsval, *boundary_values;
  PetscReal      norm;
  PetscBool      upwind = PETSC_FALSE, nonlocalBC = PETSC_FALSE;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n = nlocal*size;
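  /* the global problem is an m x n grid with bs unknowns per node, so A is (m*n*bs) x (m*n*bs) */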

  PetscOptionsGetInt(NULL,NULL, "-bs", &bs, NULL);
  PetscOptionsGetBool(NULL,NULL, "-nonlocal_bc", &nonlocalBC, NULL);

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n*bs,m*n*bs);
  MatSetFromOptions(A);
  MatSetUp(A);

  VecCreate(PETSC_COMM_WORLD, &rhs);
  VecSetSizes(rhs, PETSC_DECIDE, m*n*bs);
  VecSetFromOptions(rhs);
  VecSetUp(rhs);

  rhsval = 0.0;
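  /* each rank assembles the rows for its slab of grid columns j = nlocal*rank .. nlocal*(rank+1)-1;
     the row index is Ii = (j + n*i)*bs + b, so some rows may be owned by other ranks */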
  for (i=0; i<m; i++) {
    for (j=nlocal*rank; j<nlocal*(rank+1); j++) {
      a = a0;
      for (b=0; b<bs; b++) {
        /* let's start with a 5-point stencil diffusion term */
        v = -1.0;  Ii = (j + n*i)*bs + b;
        if (i>0)   {J = Ii - n*bs; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
        if (i<m-1) {J = Ii + n*bs; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
        if (j>0)   {J = Ii - 1*bs; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
        if (j<n-1) {J = Ii + 1*bs; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
        v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
        if (upwind) {
          /* now add a 2nd order upwind advection term to add a little asymmetry */
          if (j>2) {
            J = Ii-2*bs; v2 = 0.5*a; v1 = -2.0*a; v0 = 1.5*a;
            MatSetValues(A,1,&Ii,1,&J,&v2,ADD_VALUES);
          } else {
            /* fall back to 1st order upwind */
            v1 = -1.0*a; v0 = 1.0*a;
          }
          if (j>1) {J = Ii-1*bs; MatSetValues(A,1,&Ii,1,&J,&v1,ADD_VALUES);}
          MatSetValues(A,1,&Ii,1,&Ii,&v0,ADD_VALUES);
          a /= 10.; /* use a different velocity for the next component */
          /* add a coupling to the previous and next components */
          v = 0.5;
          if (b>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
          if (b<bs-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
        }
        /* make up some rhs */
        VecSetValue(rhs, Ii, rhsval, INSERT_VALUES);
        rhsval += 1.0;
      }
    }
  }
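  /* assembly ships any entries that were inserted into rows owned by other ranks to those ranks */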
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  VecAssemblyBegin(rhs);
  VecAssemblyEnd(rhs);
  /* set rhs to zero to simplify */
  VecZeroEntries(rhs);
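  /* the boundary node set is the i == 0 edge (global node indices 0..n-1) plus the j == 0 edge (indices n*i for i = 1..m-1) */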

  if (nonlocalBC) {
    /* version where boundary conditions are set by processes that don't necessarily own the nodes */
    if (!rank) {
      nboundary_nodes = size>m ? nlocal : m-size+nlocal;
      PetscMalloc1(nboundary_nodes,&boundary_nodes);
      k = 0;
      for (i=size; i<m; i++,k++) boundary_nodes[k] = n*i;
    } else if (rank < m) {
      nboundary_nodes = nlocal+1;
      PetscMalloc1(nboundary_nodes,&boundary_nodes);
      boundary_nodes[0] = rank*n;
      k = 1;
    } else {
      nboundary_nodes = nlocal;
      PetscMalloc1(nboundary_nodes,&boundary_nodes);
      k = 0;
    }
    for (j=nlocal*rank; j<nlocal*(rank+1); j++,k++) boundary_nodes[k] = j;
  } else {
    /* version where boundary conditions are set by the node owners only */
    PetscMalloc1(m*n,&boundary_nodes);
    k = 0;
    for (j=0; j<n; j++) {
      Ii = j;
      if (Ii>=rank*m*nlocal && Ii<(rank+1)*m*nlocal) boundary_nodes[k++] = Ii;
    }
    for (i=1; i<m; i++) {
      Ii = n*i;
      if (Ii>=rank*m*nlocal && Ii<(rank+1)*m*nlocal) boundary_nodes[k++] = Ii;
    }
    nboundary_nodes = k;
  }

  VecDuplicate(rhs, &x);
  VecZeroEntries(x);
  PetscMalloc2(nboundary_nodes*bs,&boundary_indices,nboundary_nodes*bs,&boundary_values);
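  /* each boundary node expands to bs consecutive matrix rows, node*bs .. node*bs+bs-1; the prescribed
     value starts at 1.0*node and increases by 0.1 per component */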
  for (k=0; k<nboundary_nodes; k++) {
    Ii = boundary_nodes[k]*bs;
    v = 1.0*boundary_nodes[k];
    for (b=0; b<bs; b++, Ii++) {
      boundary_indices[k*bs+b] = Ii;
      boundary_values[k*bs+b] = v;
      PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%d %D %f\n", rank, Ii, (double)PetscRealPart(v));
      v += 0.1;
    }
  }
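  /* synchronized printing emits each rank's lines in rank order; the matching flush is collective */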
  PetscSynchronizedFlush(PETSC_COMM_WORLD, NULL);
  if (nboundary_nodes) {VecSetValues(x, nboundary_nodes*bs, boundary_indices, boundary_values, INSERT_VALUES);}
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);

  /* We can check the rhs returned by MatZeroRowsColumns() by computing y = rhs - A*x and overwriting the boundary entries with the boundary values */
  VecDuplicate(x, &y);
  MatMult(A, x, y);
  VecAYPX(y, -1.0, rhs);
  if (nboundary_nodes) {VecSetValues(y, nboundary_nodes*bs, boundary_indices, boundary_values, INSERT_VALUES);}
  VecAssemblyBegin(y);
  VecAssemblyEnd(y);

  PetscPrintf(PETSC_COMM_WORLD, "*** Matrix A and vector x:\n");
  MatView(A, PETSC_VIEWER_STDOUT_WORLD);
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
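  /* MatZeroRowsColumns() zeros the listed rows and columns of A, places 1.0 on their diagonal, sets
     rhs to 1.0*x in those rows, and subtracts the eliminated column contributions from the other rhs entries */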

  MatZeroRowsColumns(A, nboundary_nodes*bs, boundary_indices, 1.0, x, rhs);
  PetscPrintf(PETSC_COMM_WORLD, "*** Vector rhs returned by MatZeroRowsColumns\n");
  VecView(rhs,PETSC_VIEWER_STDOUT_WORLD);
  VecAXPY(y, -1.0, rhs);
  VecNorm(y, NORM_INFINITY, &norm);
  if (norm > 1.0e-10) {
    PetscPrintf(PETSC_COMM_WORLD, "*** Difference between rhs and y, inf-norm: %f\n", (double)norm);
    VecView(y,PETSC_VIEWER_STDOUT_WORLD);
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Bug in MatZeroRowsColumns");
  }

  PetscFree(boundary_nodes);
  PetscFree2(boundary_indices,boundary_values);
  VecDestroy(&x);
  VecDestroy(&y);
  VecDestroy(&rhs);
  MatDestroy(&A);

  ierr = PetscFinalize();
  return ierr;
}


/*TEST

   test:
      output_file: output/ex18_0.out

   test:
      suffix: 1
      nsize: 2

   test:
      suffix: 10
      nsize: 2
      args: -bs 2 -nonlocal_bc

   test:
      suffix: 11
      nsize: 7
      args: -bs 2 -nonlocal_bc

   test:
      suffix: 12
      args: -bs 2 -nonlocal_bc -mat_type baij

   test:
      suffix: 13
      nsize: 2
      args: -bs 2 -nonlocal_bc -mat_type baij

   test:
      suffix: 14
      nsize: 7
      args: -bs 2 -nonlocal_bc -mat_type baij

   test:
      suffix: 2
      nsize: 7

   test:
      suffix: 3
      args: -mat_type baij

   test:
      suffix: 4
      nsize: 2
      args: -mat_type baij

   test:
      suffix: 5
      nsize: 7
      args: -mat_type baij

   test:
      suffix: 6
      args: -bs 2 -mat_type baij

   test:
      suffix: 7
      nsize: 2
      args: -bs 2 -mat_type baij

   test:
      suffix: 8
      nsize: 7
      args: -bs 2 -mat_type baij

   test:
      suffix: 9
      args: -bs 2 -nonlocal_bc

TEST*/