/* Source: ex12.c — PETSc 3.12.5 (2020-03-29) */
2: static char help[] = "Tests the use of MatZeroRows() for parallel matrices.\n\
3: This example also tests the use of MatDuplicate() for both MPIAIJ and MPIBAIJ matrices";
5: #include <petscmat.h>
7: extern PetscErrorCode TestMatZeroRows_Basic(Mat,IS,PetscScalar);
8: extern PetscErrorCode TestMatZeroRows_with_no_allocation(Mat,IS,PetscScalar);
10: int main(int argc,char **args)
11: {
12: Mat A;
13: PetscInt i,j,m = 3,n,Ii,J,Imax;
14: PetscMPIInt rank,size;
16: PetscScalar v,diag=-4.0;
17: IS is;
19: PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
20: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
21: MPI_Comm_size(PETSC_COMM_WORLD,&size);
22: n = 2*size;
24: /* create A Square matrix for the five point stencil,YET AGAIN*/
25: MatCreate(PETSC_COMM_WORLD,&A);
26: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
27: MatSetFromOptions(A);
28: MatSetUp(A);
29: for (i=0; i<m; i++) {
30: for (j=2*rank; j<2*rank+2; j++) {
31: v = -1.0; Ii = j + n*i;
32: if (i>0) {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
33: if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
34: if (j>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
35: if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
36: v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
37: }
38: }
39: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
40: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
42: /* Create AN IS required by MatZeroRows() */
43: Imax = n*rank; if (Imax>= n*m -m - 1) Imax = m*n - m - 1;
44: ISCreateStride(PETSC_COMM_SELF,m,Imax,1,&is);
46: TestMatZeroRows_Basic(A,is,0.0);
47: TestMatZeroRows_Basic(A,is,diag);
49: TestMatZeroRows_with_no_allocation(A,is,0.0);
50: TestMatZeroRows_with_no_allocation(A,is,diag);
52: MatDestroy(&A);
54: /* Now Create a rectangular matrix with five point stencil (app)
55: n+size is used so that this dimension is always divisible by size.
56: This way, we can always use bs = size for any number of procs */
57: MatCreate(PETSC_COMM_WORLD,&A);
58: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*(n+size));
59: MatSetFromOptions(A);
60: MatSetUp(A);
61: for (i=0; i<m; i++) {
62: for (j=2*rank; j<2*rank+2; j++) {
63: v = -1.0; Ii = j + n*i;
64: if (i>0) {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
65: if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
66: if (j>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
67: if (j<n+size-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
68: v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
69: }
70: }
71: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
72: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
74: TestMatZeroRows_Basic(A,is,0.0);
75: TestMatZeroRows_Basic(A,is,diag);
77: MatDestroy(&A);
78: ISDestroy(&is);
79: PetscFinalize();
80: return ierr;
81: }
83: PetscErrorCode TestMatZeroRows_Basic(Mat A,IS is,PetscScalar diag)
84: {
85: Mat B;
87: PetscBool keepnonzeropattern;
89: /* Now copy A into B, and test it with MatZeroRows() */
90: MatDuplicate(A,MAT_COPY_VALUES,&B);
92: PetscOptionsHasName(NULL,NULL,"-keep_nonzero_pattern",&keepnonzeropattern);
93: if (keepnonzeropattern) {
94: MatSetOption(B,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE);
95: }
97: MatZeroRowsIS(B,is,diag,0,0);
98: MatView(B,PETSC_VIEWER_STDOUT_WORLD);
99: MatDestroy(&B);
100: return 0;
101: }
103: PetscErrorCode TestMatZeroRows_with_no_allocation(Mat A,IS is,PetscScalar diag)
104: {
105: Mat B;
108: /* Now copy A into B, and test it with MatZeroRows() */
109: MatDuplicate(A,MAT_COPY_VALUES,&B);
110: /* Set this flag after assembly. This way, it affects only MatZeroRows() */
111: MatSetOption(B,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);
113: MatZeroRowsIS(B,is,diag,0,0);
114: MatView(B,PETSC_VIEWER_STDOUT_WORLD);
115: MatDestroy(&B);
116: return 0;
117: }
120: /*TEST
122: test:
123: nsize: 2
124: filter: grep -v "MPI processes"
126: test:
127: suffix: 2
128: nsize: 3
129: args: -mat_type mpibaij -mat_block_size 3
130: filter: grep -v "MPI processes"
132: test:
133: suffix: 3
134: nsize: 3
135: args: -mat_type mpiaij -keep_nonzero_pattern
136: filter: grep -v "MPI processes"
138: test:
139: suffix: 4
140: nsize: 3
141: args: -keep_nonzero_pattern -mat_type mpibaij -mat_block_size 3
142: filter: grep -v "MPI processes"
144: TEST*/