Actual source code: ex87.c
#include <petscksp.h>
#include <petsc/private/petscimpl.h>

static char help[] = "Solves a saddle-point linear system using PCHPDDM.\n\n";

static PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt size);
int main(int argc, char **args)
{
  Vec               b, x; /* RHS and computed solution */
  Mat               A[4], aux[2], S; /* blocks of the coefficient matrix, auxiliary local matrices, and global matrix */
  KSP               ksp, *subksp; /* linear solver contexts */
  PC                pc;
  IS                is[2];
  PetscMPIInt       size;
  PetscInt          m, M, n, N, id = 0;
  PetscViewer       viewer;
  const char *const system[] = {"elasticity", "stokes", "diffusion", "lagrange"};
19: /* "elasticity":
20: * 2D linear elasticity with rubber-like and steel-like material coefficients, i.e., Poisson's ratio \in {0.4999, 0.35} and Young's modulus \in {0.01 GPa, 200.0 GPa}
21: * discretized by order 2 (resp. 0) Lagrange finite elements in displacements (resp. pressure) on a triangle mesh
22: * "stokes":
23: * 2D lid-driven cavity with constant viscosity
24: * discretized by order 2 (resp. 1) Lagrange finite elements, i.e., lowest-order Taylor--Hood finite elements, in velocities (resp. pressure) on a triangle mesh
25: * if the option -empty_A11 is not set (or set to false), a pressure with a zero mean-value is computed
26: * "diffusion":
27: * 2D primal-dual nonsymmetric diffusion equation
28: * discretized by order 2 (resp. 1) Lagrange finite elements in primal (resp. dual) unknowns on a triangle mesh
29: * "lagrange":
30: * 2D linear elasticity with essential boundary conditions imposed through a Lagrange multiplier
31: */
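  /* in all four cases the global matrix has the 2-by-2 block form
   *    [ A00 A01 ] [x0]   [b0]
   *    [ A10 A11 ] [x1] = [b1]
   * stored below in row-major order as A[0] = A00, A[1] = A01, A[2] = A10, and A[3] = A11,
   * then gathered into a single MatNest; A10 is read from disk and, except for the
   * nonsymmetric "diffusion" system (where A01 is read from disk as well), A01 is
   * built as the (Hermitian) transpose of A10
   */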
  char      dir[PETSC_MAX_PATH_LEN], prefix[PETSC_MAX_PATH_LEN];
  PetscBool flg[4] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscOptionsGetEList(NULL, NULL, "-system", system, PETSC_STATIC_ARRAY_LENGTH(system), &id, NULL));
  if (id == 1) PetscCall(PetscOptionsGetBool(NULL, NULL, "-empty_A11", flg, NULL));
  if (id != 3) PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 4 processes");
  else PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 2 processes");
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatCreate(PETSC_COMM_WORLD, A + (i ? 3 : 0)));
    if (id < 2 || (id == 3 && i == 0)) {
      PetscCall(ISCreate(PETSC_COMM_SELF, is + i));
      PetscCall(MatCreate(PETSC_COMM_SELF, aux + i));
    } else {
      is[i]  = NULL;
      aux[i] = NULL;
    }
  }
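  /* the diagonal blocks are distributed over PETSC_COMM_WORLD, while is[] and aux[]
   * are sequential (per-process) objects holding the subdomain data later supplied to
   * PCHPDDMSetAuxiliaryMat(); they are only needed for "elasticity", "stokes", and the
   * first block of "lagrange", hence the condition above
   */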
  PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
  /* loading matrices and auxiliary data for the diagonal blocks */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s", dir, id == 3 ? "D" : (id == 2 ? "C" : (id == 1 ? "B" : "A"))));
  PetscCall(MatAndISLoad(prefix, "00", A[0], is[0], aux[0], size));
  PetscCall(MatAndISLoad(prefix, "11", A[3], is[1], aux[1], size));
  /* loading the off-diagonal block with a coherent row/column layout */
  PetscCall(MatCreate(PETSC_COMM_WORLD, A + 2));
  PetscCall(MatGetLocalSize(A[0], &n, NULL));
  PetscCall(MatGetSize(A[0], &N, NULL));
  PetscCall(MatGetLocalSize(A[3], &m, NULL));
  PetscCall(MatGetSize(A[3], &M, NULL));
  PetscCall(MatSetSizes(A[2], m, n, M, N));
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s10.dat", dir, id == 3 ? "D" : (id == 2 ? "C" : (id == 1 ? "B" : "A"))));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A[2], viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  if (id != 2) {
    /* transposing the off-diagonal block */
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-transpose", flg + 1, NULL));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-permute", flg + 2, NULL));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit", flg + 3, NULL));
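    /* -transpose selects MatTranspose() instead of MatHermitianTranspose() (the two coincide for real scalars),
     * -permute stores A01 explicitly and derives A10 from it instead of the converse,
     * -explicit assembles the derived block instead of wrapping it with MatCreateTranspose() or MatCreateHermitianTranspose()
     */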
    if (flg[1]) {
      if (flg[2]) {
        PetscCall(MatTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
        PetscCall(MatDestroy(A + 2));
      }
      if (!flg[3]) PetscCall(MatCreateTranspose(A[2 - flg[2]], A + 1 + flg[2]));
      else PetscCall(MatTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
    } else {
      if (flg[2]) {
        PetscCall(MatHermitianTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
        PetscCall(MatDestroy(A + 2));
      }
      if (!flg[3]) PetscCall(MatCreateHermitianTranspose(A[2 - flg[2]], A + 1 + flg[2]));
      else PetscCall(MatHermitianTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
    }
  } else {
    PetscCall(MatCreate(PETSC_COMM_WORLD, A + 1));
    PetscCall(MatSetSizes(A[1], n, m, N, M));
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/C01.dat", dir));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
    PetscCall(MatLoad(A[1], viewer));
    PetscCall(PetscViewerDestroy(&viewer));
  }
  if (flg[0]) PetscCall(MatDestroy(A + 3));
  else {
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-diagonal_A11", flg, NULL));
    if (flg[0]) {
      PetscCall(MatDestroy(A + 3));
      PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, m, m, M, M, PETSC_SMALL, A + 3));
    }
  }
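  /* with -empty_A11, A[3] is NULL, which MatCreateNest() below treats as a zero (1,1)
   * block; with -diagonal_A11, the loaded block is instead replaced by a constant
   * diagonal of value PETSC_SMALL, so the same solver configuration is exercised with a
   * different A11 representation
   */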
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-all_transpose", flg + 1, NULL));
  if (flg[1] && flg[2]) {
    PetscCall(MatTranspose(A[1], MAT_INITIAL_MATRIX, &S));
    PetscCall(MatDestroy(A + 1));
    PetscCall(MatCreateHermitianTranspose(S, A + 1));
    PetscCall(MatDestroy(&S));
  }
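  /* with both -all_transpose and -permute, the explicitly stored A01 is replaced by an
   * implicit Hermitian transpose of its explicitly assembled transpose, i.e., yet
   * another representation of the same block
   */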
  /* global coefficient matrix */
  PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, A, &S));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, S, S));
  PetscCall(KSPGetPC(ksp, &pc));
  /* outer preconditioner */
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
  PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
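  /* PC_FIELDSPLIT_SCHUR_PRE_SELF requests that the preconditioner for the Schur
   * complement A11 - A10 * inv(A00) * A01 be built from the Schur complement itself,
   * a setup which the inner PCHPDDM below knows how to handle (cf. the
   * -fieldsplit_1_pc_hpddm_schur_precondition option used in the tests)
   */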
  PetscCall(PCSetFromOptions(pc));
  PetscCall(PCSetUp(pc));
  PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
  PetscCall(KSPGetPC(subksp[0], &pc));
  /* inner preconditioner associated with the top-left block */
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  PetscCall(PCSetType(pc, PCHPDDM));
  PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
#endif
  PetscCall(PCSetFromOptions(pc));
  PetscCall(KSPGetPC(subksp[1], &pc));
  /* inner preconditioner associated with the Schur complement, which will be set internally to PCKSP (or PCASM if the Schur complement is centralized on a single process) */
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  PetscCall(PCSetType(pc, PCHPDDM));
  if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
#endif
  PetscCall(PCSetFromOptions(pc));
  PetscCall(PetscFree(subksp));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(MatCreateVecs(S, &b, &x));
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/rhs_%s.dat", dir, id == 3 ? "D" : (id == 2 ? "C" : (id == 1 ? "B" : "A"))));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(b, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(KSPSolve(ksp, b, x));
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", flg + 1, NULL));
  if (flg[1]) PetscCall(PCView(pc, PETSC_VIEWER_STDOUT_WORLD));
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", flg + 1, NULL));
  if (flg[1]) {
    KSPConvergedReason reason[2];
    PetscInt           iterations[2];
    PetscCall(KSPGetConvergedReason(ksp, reason));
    PetscCall(KSPGetTotalIterations(ksp, iterations));
    PetscCall(KSPMonitorCancel(ksp));
    PetscCall(PetscOptionsClearValue(NULL, "-ksp_monitor"));
    PetscCall(PetscObjectStateIncrease((PetscObject)S));
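    /* bumping the state of S makes the KSP see a "modified" operator, so the next
     * KSPSolve() rebuilds the preconditioner instead of reusing the previous setup
     */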
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetUp(pc)); /* update PCFIELDSPLIT submatrices */
    PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
    PetscCall(KSPGetPC(subksp[0], &pc));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
    PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
#endif
    PetscCall(PCSetFromOptions(pc));
    PetscCall(KSPGetPC(subksp[1], &pc));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
    PetscCall(PCSetType(pc, PCHPDDM)); /* may have been set to PCKSP internally (or PCASM if the Schur complement is centralized on a single process), so the proper PCType must be enforced */
    if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
#endif
    PetscCall(PCSetFromOptions(pc));
    PetscCall(PetscFree(subksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPGetConvergedReason(ksp, reason + 1));
    PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
    iterations[1] -= iterations[0];
    PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
  }
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&S));
  PetscCall(MatDestroy(A + 1));
  PetscCall(MatDestroy(A + 2));
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatDestroy(A + (i ? 3 : 0)));
    PetscCall(MatDestroy(aux + i));
    PetscCall(ISDestroy(is + i));
  }
  PetscCall(PetscFinalize());
  return 0;
}
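
/* loads a distributed matrix from "<prefix><identifier>.dat" into A and, when both is
 * and aux are non-NULL, an index set from "<prefix><identifier>_is_<size>.dat" and a
 * sequential auxiliary matrix from "<prefix><identifier>_aux_<size>.dat", as needed by
 * PCHPDDMSetAuxiliaryMat(); "<prefix><identifier>_sizes_<size>.dat" supplies the
 * per-process dimensions used to size the objects before the actual reads
 */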
PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt size)
{
  Mat             tmp[3];
  IS              sizes;
  const PetscInt *idx;
  PetscInt        m;
  PetscLayout     map;
  PetscViewer     viewer;
  char            name[PETSC_MAX_PATH_LEN];

  PetscFunctionBeginUser;
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_sizes_%d.dat", prefix, identifier, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISSetBlockSize(sizes, is && aux ? 5 : 4)); /* not mandatory but useful to check for proper sizes */
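  /* each process owns one block of the sizes IS: the local and global row and column
   * dimensions of A (4 integers), plus the local dimension of the auxiliary matrix
   * (a 5th integer) when one is requested
   */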
  PetscCall(ISGetIndices(sizes, &idx));
  PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
  if (is && aux) {
    PetscCall(MatCreate(PETSC_COMM_WORLD, tmp));
    PetscCall(MatSetSizes(tmp[0], idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSetUp(tmp[0]));
  }
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s.dat", prefix, identifier));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  if (is && aux) {
    PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
    PetscCall(MatGetLayouts(tmp[0], &map, NULL));
    PetscCall(ISSetLayout(sizes, map));
    PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_is_%d.dat", prefix, identifier, size));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
    PetscCall(ISLoad(sizes, viewer));
    PetscCall(ISGetLocalSize(sizes, &m));
    PetscCall(ISGetIndices(sizes, &idx));
    PetscCall(ISSetType(is, ISGENERAL));
    PetscCall(ISGeneralSetIndices(is, m, idx, PETSC_COPY_VALUES));
    PetscCall(ISRestoreIndices(sizes, &idx));
    PetscCall(ISDestroy(&sizes));
    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_aux_%d.dat", prefix, identifier, size));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
    PetscCall(MatLoad(tmp[0], viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(MatGetDiagonalBlock(tmp[0], tmp + 1));
    PetscCall(MatDuplicate(tmp[1], MAT_COPY_VALUES, tmp + 2));
    PetscCall(MatHeaderReplace(aux, tmp + 2));
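    /* MatHeaderReplace() moves the duplicated sequential diagonal block into the
     * caller-supplied aux handle (destroying tmp[2] in the process), so the Mat
     * created in main() now holds the auxiliary subdomain matrix
     */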
    PetscCall(MatDestroy(tmp));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
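
/* a representative invocation, assuming the PETSc data files are available (the paths
 * and options below are a subset of those used by the first test):
 *   mpiexec -n 4 ./ex87 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system stokes \
 *     -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_1_pc_hpddm_schur_precondition geneo
 */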

/*TEST

   testset:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_has_neumann -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2
      test:
         requires: mumps
         suffix: 1
         args: -viewer -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1
         filter: grep -v -e "action of " -e " " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e " rows="
      test:
         requires: mumps
         suffix: 2
         output_file: output/ex87_1_system-stokes.out
         args: -viewer -system stokes -empty_A11 -transpose {{false true}shared output} -permute {{false true}shared output} -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1 -explicit {{false true}shared output}
         filter: grep -v -e "action of " -e " " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e " rows=" | sed -e "s/ right preconditioning/ left preconditioning/g" -e "s/ using UNPRECONDITIONED/ using PRECONDITIONED/g"
      test:
         suffix: 1_petsc
         args: -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 -permute
      test:
         suffix: 2_petsc
         output_file: output/ex87_1_petsc_system-stokes.out
         args: -system stokes -empty_A11 -transpose -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks -successive_solves
         filter: sed -e "s/type: transpose/type: hermitiantranspose/g"
      test:
         suffix: threshold
         requires: !defined(PETSC_HAVE_MKL_SPARSE_SP2M_FEATURE)
         output_file: output/ex87_1_petsc_system-elasticity.out
         args: -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.2 -fieldsplit_1_pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -successive_solves

   testset:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2 -system stokes -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3
      test:
         suffix: diagonal
         output_file: output/ex87_1_petsc_system-stokes.out
         args: -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_has_neumann -diagonal_A11 {{false true}shared output}
      test:
         suffix: harmonic_overlap_2
         output_file: output/ex87_1_petsc_system-stokes.out
         args: -fieldsplit_0_pc_hpddm_harmonic_overlap 2 -fieldsplit_0_pc_hpddm_levels_1_svd_nsv 20 -diagonal_A11 -permute {{false true}shared output} -all_transpose

   test:
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) !hpddm !memkind
      nsize: 4
      suffix: selfp
      output_file: output/ex41_1.out
      filter: grep -v "CONVERGED_RTOL iterations"
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system stokes -ksp_rtol 1e-4 -ksp_converged_reason -ksp_max_it 30 -pc_type fieldsplit -pc_fieldsplit_type schur -fieldsplit_ksp_type preonly -pc_fieldsplit_schur_precondition selfp -fieldsplit_pc_type bjacobi -fieldsplit_sub_pc_type lu -transpose {{false true}shared output} -fieldsplit_1_mat_schur_complement_ainv_type lump

   test:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      suffix: nonsymmetric_least_squares
      output_file: output/ex41_1.out
      filter: grep -v "CONVERGED_RTOL iterations"
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system diffusion -ksp_rtol 1e-4 -ksp_converged_reason -ksp_max_it 20 -pc_type fieldsplit -pc_fieldsplit_type schur -fieldsplit_ksp_type preonly -fieldsplit_0_pc_type jacobi -prefix_push fieldsplit_1_ -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -prefix_push pc_hpddm_levels_1_ -sub_pc_type lu -sub_pc_factor_shift_type nonzero -eps_nev 5 -st_share_sub_ksp -prefix_pop -prefix_pop

   test:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 2
      suffix: lagrange
      output_file: output/ex41_1.out
      filter: grep -v "CONVERGED_RTOL iterations"
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_0_pc_hpddm_has_neumann -fieldsplit_0_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_0_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_0_pc_hpddm_coarse_pc_type redundant -fieldsplit_0_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_0_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -system lagrange -transpose {{false true}shared output} -successive_solves

TEST*/