Actual source code: ex19.c
static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square. Input arguments are:\n\
  -m <size> : problem size\n\n";

#include "petscmat.h"
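
/*
   FormElementStiffness - Fills Ke with the 4x4 element stiffness matrix used
   by this example (bilinear elements on a square, scaled by the element area H).
   Note that each row of Ke sums to zero: H/6 - H/8 + H/12 - H/8 = 0.  The
   correctness check after assembly relies on this property.
*/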
int FormElementStiffness(PetscReal H,PetscScalar *Ke)
{
  Ke[0]  = H/6.0;   Ke[1]  = -.125*H; Ke[2]  = H/12.0;  Ke[3]  = -.125*H;
  Ke[4]  = -.125*H; Ke[5]  = H/6.0;   Ke[6]  = -.125*H; Ke[7]  = H/12.0;
  Ke[8]  = H/12.0;  Ke[9]  = -.125*H; Ke[10] = H/6.0;   Ke[11] = -.125*H;
  Ke[12] = -.125*H; Ke[13] = H/12.0;  Ke[14] = -.125*H; Ke[15] = H/6.0;
  return(0);
}

int main(int argc,char **args)
{
  Mat         C;
  Vec         u,b;
  PetscMPIInt size,rank;
  PetscInt    i,m = 5,N,start,end,M,idx[4];
  PetscInt    j,nrsub,ncsub,*rsub,*csub,mystart,myend;
  PetscTruth  flg;
  PetscScalar one = 1.0,Ke[16],*vals;
  PetscReal   h,norm;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);

  N = (m+1)*(m+1);  /* dimension of matrix */
  M = m*m;          /* number of elements */
  h = 1.0/m;        /* mesh width */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  /* Create stiffness matrix */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,N,N,&C);
  MatSetFromOptions(C);
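  /*
     With PETSC_DECIDE for the local sizes, PETSc picks the row layout of C
     (roughly N/size rows per process).  That layout generally differs from the
     element partition computed next, so many values are set in rows owned by
     other processes, which is exactly what this example wants to exercise.
  */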
  start = rank*(M/size) + ((M%size) < rank ? (M%size) : rank);
  end   = start + M/size + ((M%size) > rank);
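  /* The two lines above distribute the M elements (not the matrix rows):
     every process gets M/size elements and the first M%size processes get one
     extra, so elements start..end-1 are assembled on this process. */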

  /* Form the element stiffness for the Laplacian */
  FormElementStiffness(h*h,Ke);
  for (i=start; i<end; i++) {
    /* location of lower left corner of element */
    /* node numbers for the four corners of element */
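    /* Element i lies in row i/m, column i%m of the element grid; its corners on
       the (m+1) x (m+1) vertex grid are listed counterclockwise: lower left,
       lower right, upper right, upper left. */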
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES);
  }
  MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

  /* Assemble the matrix again */
  MatZeroEntries(C);
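  /* MatZeroEntries() zeros the values but keeps the assembled nonzero
     structure, so the second assembly below reuses the storage and
     communication pattern set up by the first one. */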

  for (i=start; i<end; i++) {
    /* location of lower left corner of element */
    /* node numbers for the four corners of element */
    idx[0] = (m+1)*(i/m) + (i % m);
    idx[1] = idx[0]+1; idx[2] = idx[1] + m + 1; idx[3] = idx[2] - 1;
    MatSetValues(C,4,idx,4,idx,Ke,ADD_VALUES);
  }
  MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

  /* Create test vectors */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,N);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecSet(&one,u);

  /* Check error */
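  /* Every row of C sums to zero (each row of Ke does), so C*u with u = (1,...,1)
     should vanish up to roundoff; anything larger than 1e-10 is reported. */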
  MatMult(C,u,b);
  VecNorm(b,NORM_2,&norm);
  if (norm > 1.e-10 || norm < -1.e-10) {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error b %g should be near 0\n",norm);
  }

  /* Now test MatGetValues() */
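  /* MatGetValues() can only read entries from rows owned by the calling
     process, so rsub below is restricted to the ownership range
     [mystart, myend) reported by MatGetOwnershipRange(). */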
  PetscOptionsHasName(PETSC_NULL,"-get_values",&flg);
  if (flg) {
    MatGetOwnershipRange(C,&mystart,&myend);
    nrsub = myend - mystart; ncsub = 4;
    PetscMalloc(nrsub*ncsub*sizeof(PetscScalar),&vals);
    PetscMalloc(nrsub*sizeof(PetscInt),&rsub);
    PetscMalloc(ncsub*sizeof(PetscInt),&csub);
    for (i=myend-1; i>=mystart; i--) rsub[myend-i-1] = i;
    for (i=0; i<ncsub; i++) csub[i] = 2*(ncsub-i) + mystart;
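    /* rsub holds the locally owned rows in descending order and csub the four
       columns mystart+8, +6, +4, +2, so MatGetValues() is exercised with
       index sets that are not in increasing order. */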
    MatGetValues(C,nrsub,rsub,ncsub,csub,vals);
    MatView(C,PETSC_VIEWER_STDOUT_WORLD);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"processor number %d: start=%D, end=%D, mystart=%D, myend=%D\n",
                            rank,start,end,mystart,myend);
    for (i=0; i<nrsub; i++) {
      for (j=0; j<ncsub; j++) {
#if defined(PETSC_USE_COMPLEX)
        if (PetscImaginaryPart(vals[i*ncsub+j]) != 0.0) {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%D, %D] = %g + %g i\n",rsub[i],csub[j],PetscRealPart(vals[i*ncsub+j]),
                                  PetscImaginaryPart(vals[i*ncsub+j]));
        } else {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%D, %D] = %g\n",rsub[i],csub[j],PetscRealPart(vals[i*ncsub+j]));
        }
#else
        PetscSynchronizedPrintf(PETSC_COMM_WORLD,"  C[%D, %D] = %g\n",rsub[i],csub[j],vals[i*ncsub+j]);
#endif
      }
    }
    PetscSynchronizedFlush(PETSC_COMM_WORLD);
    PetscFree(rsub);
    PetscFree(csub);
    PetscFree(vals);
  }

  /* Free data structures */
  VecDestroy(u);
  VecDestroy(b);
  MatDestroy(C);
  PetscFinalize();
  return 0;
}
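
/*
   A typical way to run this example (assuming a standard PETSc build and the
   usual example makefile targets; adjust the path and MPI launcher for your
   installation):

       mpiexec -n 2 ./ex19 -m 5 -get_values
*/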