Actual source code: ex4.c (petsc-3.12.5, 2020-03-29)

static char help[] = "Solves a linear system in parallel with KSP and HMG.\n\
Input parameters include:\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\
  -bs               : number of variables on each mesh vertex\n\n";

/*
   This simple example tests PCHMG.
*/
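/*
   Typical invocation (a sketch only; the launcher and executable name depend
   on your build, and these options mirror the TEST block at the bottom):
     mpiexec -n 2 ./ex4 -ksp_monitor -ksp_rtol 1e-6 -pc_type hmg -hmg_inner_pc_type gamg
*/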
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;    /* approx solution, RHS, exact solution */
  Mat            A;        /* linear system matrix */
  KSP            ksp;      /* linear solver context */
  PetscReal      norm;     /* norm of solution error */
  PetscInt       i,j,Ii,J,Istart,Iend,m = 8,n = 7,its,bs=1,II,JJ,jj;
  PetscErrorCode ierr;
  PetscBool      flg,test=PETSC_FALSE,reuse=PETSC_FALSE;
  PetscScalar    v;
  PC             pc;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-bs",&bs,NULL);
  PetscOptionsGetBool(NULL,NULL,"-test_hmg_interface",&test,NULL);
  PetscOptionsGetBool(NULL,NULL,"-test_reuse_interpolation",&reuse,NULL);
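
  /* Create the operator for a 2-D m-by-n vertex grid with bs unknowns per
     vertex; preallocate 5 nonzeros per row for the five-point stencil */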
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n*bs,m*n*bs);
  MatSetBlockSize(A,bs);
  MatSetFromOptions(A);
  MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  MatSeqAIJSetPreallocation(A,5,NULL);

  MatGetOwnershipRange(A,&Istart,&Iend);
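
  /* Assemble the 2-D five-point Laplacian: -1 on the four neighbor couplings
     and 4 on the diagonal, replicated for each of the bs components at a
     vertex; vertex Ii owns matrix rows Ii*bs through Ii*bs+bs-1 */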
  for (Ii=Istart/bs; Ii<Iend/bs; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0) {
      J = Ii - n;
      for (jj=0; jj<bs; jj++) {
        II = Ii*bs + jj;
        JJ = J*bs + jj;
        MatSetValues(A,1,&II,1,&JJ,&v,ADD_VALUES);
      }
    }
    if (i<m-1) {
      J = Ii + n;
      for (jj=0; jj<bs; jj++) {
        II = Ii*bs + jj;
        JJ = J*bs + jj;
        MatSetValues(A,1,&II,1,&JJ,&v,ADD_VALUES);
      }
    }
    if (j>0) {
      J = Ii - 1;
      for (jj=0; jj<bs; jj++) {
        II = Ii*bs + jj;
        JJ = J*bs + jj;
        MatSetValues(A,1,&II,1,&JJ,&v,ADD_VALUES);
      }
    }
    if (j<n-1) {
      J = Ii + 1;
      for (jj=0; jj<bs; jj++) {
        II = Ii*bs + jj;
        JJ = J*bs + jj;
        MatSetValues(A,1,&II,1,&JJ,&v,ADD_VALUES);
      }
    }
    v = 4.0;
    for (jj=0; jj<bs; jj++) {
      II = Ii*bs + jj;
      MatSetValues(A,1,&II,1,&II,&v,ADD_VALUES);
    }
  }

  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
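
  /* Create vectors compatible with A: u holds the exact solution,
     b the right-hand side, and x the computed solution */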
  MatCreateVecs(A,&u,NULL);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
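
  /* Choose u = (1,...,1) as the exact solution and form the matching
     right-hand side b = A*u */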
  VecSet(u,1.0);
  MatMult(A,u,b);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-view_exact_sol",&flg,NULL);
  if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}
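
  /* Create the linear solver context and set A as both the operator and the
     preconditioning matrix */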
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetOperators(ksp,A,A);
  KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),1.e-50,PETSC_DEFAULT,PETSC_DEFAULT);
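
  /* With -test_hmg_interface, exercise the PCHMG C interface directly instead
     of the options database: GAMG builds the interpolations, which are reused
     across solves; subspace coarsening works on component 0; MATMAIJ is not
     used for the interpolation matrices */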
  if (test) {
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCHMG);
    PCHMGSetInnerPCType(pc,PCGAMG);
    PCHMGSetReuseInterpolation(pc,PETSC_TRUE);
    PCHMGSetUseSubspaceCoarsening(pc,PETSC_TRUE);
    PCHMGUseMatMAIJ(pc,PETSC_FALSE);
    PCHMGSetCoarseningComponent(pc,0);
  }

  KSPSetFromOptions(ksp);
  KSPSolve(ksp,b,x);
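
  /* With -test_reuse_interpolation, re-solve after reassembling A so the
     preconditioner can reuse the interpolations built in the first solve */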
  if (reuse) {
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
    KSPSolve(ksp,b,x);
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
    KSPSolve(ksp,b,x);
    /* Make sparsity pattern different and reuse interpolation */
    MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
    MatSetOption(A,MAT_IGNORE_ZERO_ENTRIES,PETSC_FALSE);
    MatGetSize(A,&m,NULL);
    n = 0;
    v = 0;
    m--;
    /* Connect the last element to the first element */
    MatSetValue(A,m,n,v,ADD_VALUES);
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
    KSPSolve(ksp,b,x);
  }
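
  /* Check the error: compare the computed solution x against the exact
     solution u and report the 2-norm of the difference */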
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);

  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);

  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);

  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   build:
      requires: !complex !single

   test:
      suffix: hypre
      nsize: 2
      requires: hypre
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type hypre

   test:
      suffix: hypre_bs4
      nsize: 2
      requires: hypre
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type hypre -bs 4 -pc_hmg_use_subspace_coarsening 1

   test:
      suffix: hypre_asm
      nsize: 2
      requires: hypre
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type hypre -bs 4 -pc_hmg_use_subspace_coarsening 1 -mg_levels_3_pc_type asm

   test:
      suffix: hypre_fieldsplit
      nsize: 2
      requires: hypre
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type hypre -bs 4 -mg_levels_4_pc_type fieldsplit

   test:
      suffix: gamg
      nsize: 2
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type gamg

   test:
      suffix: gamg_bs4
      nsize: 2
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type gamg -bs 4 -pc_hmg_use_subspace_coarsening 1

   test:
      suffix: gamg_asm
      nsize: 2
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type gamg -bs 4 -pc_hmg_use_subspace_coarsening 1 -mg_levels_1_pc_type asm

   test:
      suffix: gamg_fieldsplit
      nsize: 2
      args: -ksp_monitor -pc_type hmg -ksp_rtol 1e-6 -hmg_inner_pc_type gamg -bs 4 -mg_levels_1_pc_type fieldsplit

   test:
      suffix: interface
      nsize: 2
      args: -ksp_monitor -ksp_rtol 1e-6 -test_hmg_interface 1 -bs 4

   test:
      suffix: reuse
      nsize: 2
      args: -ksp_monitor -ksp_rtol 1e-6 -pc_type hmg -pc_hmg_reuse_interpolation 1 -test_reuse_interpolation 1 -hmg_inner_pc_type gamg

   test:
      suffix: component
      nsize: 2
      args: -ksp_monitor -ksp_rtol 1e-6 -pc_type hmg -pc_hmg_coarsening_component 2 -pc_hmg_use_subspace_coarsening 1 -bs 4 -hmg_inner_pc_type gamg

TEST*/