Actual source code: ex67.c
static char help[] = "Krylov methods to solve u'' = f in parallel with periodic boundary conditions,\n\
with a singular, inconsistent system.\n\n";
/*
   This tests solving singular inconsistent systems with GMRES

   Default: solves a symmetric system
   -symmetric false: solves a non-symmetric system where nullspace(A) != nullspace(A')

   -ksp_pc_side left or right

   See the manual page for KSPSolve() for a discussion of when right preconditioning with
   nullspace(A) != nullspace(A') can fail to produce the norm-minimizing solution.

   Note that though this example does solve the system with right preconditioning and
   nullspace(A) != nullspace(A'), it does not produce the norm-minimizing solution; that is,
   the computed solution is not orthogonal to nullspace(A).

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscksp.h" so that we can use KSP solvers. Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
     petscpc.h     - preconditioners
     petscksp.h    - Krylov subspace methods (linear solvers)
*/
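/*
   Example runs (mirroring the tests at the bottom of this file; the executable
   name ex67 is assumed from the usual PETSc build convention):

     ./ex67 -pc_type sor -ksp_pc_side left -ksp_monitor_true_residual
     ./ex67 -symmetric false -pc_type jacobi -ksp_pc_side right -ksp_monitor_true_residual
*/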
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>
#include <petsc/private/kspimpl.h>

PetscBool symmetric = PETSC_TRUE;

PetscErrorCode FormMatrix(Mat, void *);
PetscErrorCode FormRightHandSide(Vec, void *);
int main(int argc, char **argv)
{
  KSP          ksp;
  Mat          J;
  DM           da;
  Vec          x, r; /* vectors */
  PetscInt     M = 10;
  MatNullSpace constants, nconstants;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &M, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-symmetric", &symmetric, NULL));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create linear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures; set function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
  */
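  /*
     Arguments, in order: communicator, boundary type (periodic), global grid
     size M, one degree of freedom per node, stencil width 2, no prescribed
     per-process partitioning (NULL), and the DMDA being created.
  */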
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, M, 1, 2, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  /*
     Extract a global vector from the DMDA; then duplicate for the remaining
     vectors that are of the same type
  */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));
  /*
     Evaluate the right-hand side vector.
     - Note that the final routine argument is the user-defined
       context that provides application-specific data to the
       evaluation routine (here, the DMDA).
  */
  PetscCall(FormRightHandSide(r, da));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateMatrix(da, &J));
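  /*
     nullspace(A) is spanned by the constant vector; passing PETSC_TRUE with no
     additional vectors to MatNullSpaceCreate() requests exactly that space
  */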
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &constants));
  if (symmetric) {
    PetscCall(MatSetOption(J, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(MatSetOption(J, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
  } else {
    Vec         n;
    PetscInt    zero  = 0;
    PetscScalar zeros = 0.0;

    PetscCall(VecDuplicate(x, &n));
    /* the nullspace(A') is the constant vector but with a zero in the very first entry; hence nullspace(A') != nullspace(A) */
    PetscCall(VecSet(n, 1.0));
    PetscCall(VecSetValues(n, 1, &zero, &zeros, INSERT_VALUES));
    PetscCall(VecAssemblyBegin(n));
    PetscCall(VecAssemblyEnd(n));
    PetscCall(VecNormalize(n, NULL));
    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_FALSE, 1, &n, &nconstants));
    PetscCall(MatSetTransposeNullSpace(J, nconstants));
    PetscCall(MatNullSpaceDestroy(&nconstants));
    PetscCall(VecDestroy(&n));
  }
  PetscCall(MatSetNullSpace(J, constants));
  PetscCall(FormMatrix(J, da));
  PetscCall(KSPSetOperators(ksp, J, J));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, r, x));
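  /*
     Optional diagnostic, a minimal sketch that is not part of the original
     example: compute the true residual norm ||b - Ax||. Since b contains a
     component outside range(A), this norm reflects the size of that removed
     component rather than converging to zero. The -check_residual option name
     is hypothetical, introduced here only to keep the default output unchanged.
  */
  {
    PetscBool check = PETSC_FALSE;

    PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_residual", &check, NULL)); /* hypothetical option */
    if (check) {
      Vec       resid;
      PetscReal norm;

      PetscCall(VecDuplicate(r, &resid));
      PetscCall(MatMult(J, x, resid));    /* resid = A x     */
      PetscCall(VecAYPX(resid, -1.0, r)); /* resid = b - A x */
      PetscCall(VecNorm(resid, NORM_2, &norm));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "True residual norm %g\n", (double)norm));
      PetscCall(VecDestroy(&resid));
    }
  }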
  /* also test the transpose solve; the result overwrites x */
  PetscCall(KSPSolveTranspose(ksp, r, x));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&r));
  PetscCall(MatDestroy(&J));
  PetscCall(MatNullSpaceDestroy(&constants));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
/*
   This intentionally includes something in the right-hand side that is not in the range of the matrix A.
   Since MatSetNullSpace() is called and the matrix is symmetric, the Krylov method automatically removes this
   portion of the right-hand side before solving the linear system.
*/
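/*
   Why the right-hand side below is inconsistent: for periodic u'' = f the
   compatibility condition is that f have zero mean, since

      int_0^1 u''(x) dx = u'(1) - u'(0) = 0.

   Here f(x) = -sin(2*pi*x) + 1 has mean 1, so the "+1" contributes a component
   along the constant vector, i.e., along nullspace(A) rather than range(A).
*/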
PetscErrorCode FormRightHandSide(Vec f, void *ctx)
{
  DM           da = (DM)ctx;
  PetscScalar *ff;
  PetscInt     i, M, xs, xm;
  PetscReal    h;

  PetscFunctionBeginUser;
  PetscCall(DMDAVecGetArray(da, f, &ff));

  /*
     Get local grid boundaries (for 1-dimensional DMDA):
       xs, xm - starting grid index, width of local grid (no ghost points)
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAGetInfo(da, NULL, &M, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));

  /*
     Compute the function over the locally owned part of the grid; the added
     constant 1.0 gives f a nonzero mean, making the system inconsistent.
  */
  h = 1.0 / M;
  for (i = xs; i < xs + xm; i++) ff[i] = -PetscSinReal(2.0 * PETSC_PI * i * h) + 1.0;

  /*
     Restore vector
  */
  PetscCall(DMDAVecRestoreArray(da, f, &ff));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ------------------------------------------------------------------- */
PetscErrorCode FormMatrix(Mat jac, void *ctx)
{
  PetscScalar A[3];
  PetscInt    i, M, xs, xm;
  DM          da = (DM)ctx;
  MatStencil  row, cols[3];
  PetscReal   h;

  PetscFunctionBeginUser;
  /*
     Get the range of locally owned rows and the global grid size
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAGetInfo(da, NULL, &M, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));

  PetscCall(MatZeroEntries(jac));
  h = 1.0 / M;
  /* because of the periodic boundary conditions we can simply loop over all local nodes and access the neighbors to the left and right */
  if (symmetric) {
    for (i = xs; i < xs + xm; i++) {
      row.i     = i;
      cols[0].i = i - 1;
      cols[1].i = i;
      cols[2].i = i + 1;
      A[0] = A[2] = 1.0 / (h * h);
      A[1]        = -2.0 / (h * h);
      PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
    }
  } else {
    MatStencil  *acols;
    PetscScalar *avals;

    /* this branch fills rows with explicit global indices, so it only works for one process */
    PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));
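    /*
       Row 0 is replaced by a dense row of alternating -1, +1 entries; for even M
       (the default M = 10) the row still sums to zero, so the constant vector
       remains in nullspace(A), while the symmetry of the matrix is destroyed.
    */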
    row.i = 0;
    PetscCall(PetscMalloc1(M, &acols));
    PetscCall(PetscMalloc1(M, &avals));
    for (i = 0; i < M; i++) {
      acols[i].i = i;
      avals[i]   = (i % 2) ? 1 : -1;
    }
    PetscCall(MatSetValuesStencil(jac, 1, &row, M, acols, avals, ADD_VALUES));
    PetscCall(PetscFree(acols));
    PetscCall(PetscFree(avals));
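    /*
       Rows 1 and M-1 use deliberately skewed stencils: row 1 couples to column -1
       (which wraps periodically to M-1) rather than column 0, and row M-1 couples
       to column M+1 (which wraps to 1) rather than column 0. With these choices
       every column of the submatrix formed by rows 1..M-1 sums to zero, so the
       normalized vector (0, 1, ..., 1) built in main() spans nullspace(A').
    */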
    row.i     = 1;
    cols[0].i = -1;
    cols[1].i = 1;
    cols[2].i = 1 + 1;
    A[0] = A[2] = 1.0 / (h * h);
    A[1]        = -2.0 / (h * h);
    PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
    for (i = 2; i < xs + xm - 1; i++) {
      row.i     = i;
      cols[0].i = i - 1;
      cols[1].i = i;
      cols[2].i = i + 1;
      A[0] = A[2] = 1.0 / (h * h);
      A[1]        = -2.0 / (h * h);
      PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
    }
    row.i     = M - 1;
    cols[0].i = M - 2;
    cols[1].i = M - 1;
    cols[2].i = M + 1;
    A[0] = A[2] = 1.0 / (h * h);
    A[1]        = -2.0 / (h * h);
    PetscCall(MatSetValuesStencil(jac, 1, &row, 3, cols, A, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*TEST

   test:
      suffix: nonsymmetric_left
      args: -symmetric false -ksp_view -ksp_converged_reason -pc_type jacobi -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side left
      filter: sed 's/ATOL/RTOL/g'
      requires: !single

   test:
      suffix: nonsymmetric_right
      args: -symmetric false -ksp_view -ksp_converged_reason -pc_type jacobi -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side right
      filter: sed 's/ATOL/RTOL/g'
      requires: !single

   test:
      suffix: symmetric_left
      args: -ksp_view -ksp_converged_reason -pc_type sor -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side left
      requires: !single

   test:
      suffix: symmetric_right
      args: -ksp_view -ksp_converged_reason -pc_type sor -mat_no_inode -ksp_monitor_true_residual -ksp_rtol 1.e-14 -ksp_max_it 12 -ksp_pc_side right
      filter: sed 's/ATOL/RTOL/g'
      requires: !single

   test:
      suffix: transpose_asm
      args: -symmetric false -ksp_monitor -ksp_view -pc_type asm -sub_pc_type lu -sub_pc_factor_zeropivot 1.e-33 -ksp_converged_reason
      filter: sed 's/ATOL/RTOL/g'

TEST*/