Actual source code: ex76.c
#include <petscksp.h>
#include <petsc/private/petscimpl.h>

static char help[] = "Solves a linear system using PCHPDDM.\n\n";
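
/* an example invocation, assembled from the test suite at the bottom of this file (it assumes the
   binary data files are available under ${DATAFILESPATH}/matrices/hpddm/GENEO and that the program
   is run on exactly 4 processes, as enforced below):
     mpiexec -n 4 ./ex76 -ksp_rtol 1e-3 -ksp_converged_reason -pc_type hpddm -pc_hpddm_define_subdomains \
       -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_coarse_sub_pc_type lu \
       -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO */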

int main(int argc, char **args)
{
  Vec             b;            /* computed solution and RHS */
  Mat             A, aux, X, B; /* linear system matrix, auxiliary (local Neumann) matrix, and work matrices */
  KSP             ksp;          /* linear solver context */
  PC              pc;
  IS              is, sizes;
  const PetscInt *idx;
  PetscMPIInt     rank, size;
  PetscInt        m, N = 1;
  PetscLayout     map;
  PetscViewer     viewer;
  char            dir[PETSC_MAX_PATH_LEN], name[PETSC_MAX_PATH_LEN], type[256];
  PetscBool3      share = PETSC_BOOL3_UNKNOWN;
  PetscBool       flg, set, transpose = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCall(PetscLogDefaultBegin());
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 4 processes");
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-rhs", &N, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
  /* loading matrices */
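  /* on each process, sizes_<np>.dat provides five integers: idx[0]/idx[1] are the local row/column
     sizes of A, idx[2]/idx[3] its global sizes, and idx[4] the local size of the unassembled
     Neumann matrix loaded further below */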
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/sizes_%d.dat", dir, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISGetIndices(sizes, &idx));
  PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &X));
  PetscCall(MatSetSizes(X, idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetUp(X));
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/A.dat", dir));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
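  /* is_<np>.dat stores, for each process, the global indices of its overlapping subdomain,
     i.e., the rows of A on which the auxiliary (Neumann) matrix below is defined */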
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/is_%d.dat", dir, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
  PetscCall(MatGetLayouts(X, &map, NULL));
  PetscCall(ISSetLayout(sizes, map));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISGetLocalSize(sizes, &m));
  PetscCall(ISGetIndices(sizes, &idx));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_COPY_VALUES, &is));
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(ISSetBlockSize(is, 2));
  PetscCall(PetscViewerDestroy(&viewer));
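  /* Neumann_<np>.dat stores the unassembled local Neumann matrices; the diagonal block owned by
     each process is duplicated into aux, the auxiliary Mat handed to PCHPDDM below */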
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d.dat", dir, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(X, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatGetDiagonalBlock(X, &B));
  PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &aux));
  PetscCall(MatDestroy(&X));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_levels_1_st_share_sub_ksp", &flg, &set));
  if (flg) { /* PETSc LU/Cholesky struggles numerically for bs > 1 */
    /* only set the proper bs for the geneo_share_* tests, 1 otherwise */
    PetscCall(MatSetBlockSizesFromMats(aux, A, A));
    share = PETSC_BOOL3_TRUE;
  } else if (set) share = PETSC_BOOL3_FALSE;
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
  PetscCall(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE));
  /* ready for testing */
  PetscOptionsBegin(PETSC_COMM_WORLD, "", "", "");
  PetscCall(PetscStrncpy(type, MATAIJ, sizeof(type)));
  PetscCall(PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, type, type, 256, &flg));
  PetscOptionsEnd();
  PetscCall(MatConvert(A, type, MAT_INPLACE_MATRIX, &A));
  PetscCall(MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCHPDDM));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-reset", &flg, NULL));
  if (flg) {
    PetscCall(PetscOptionsSetValue(NULL, "-pc_hpddm_block_splitting", "true"));
    PetscCall(PCSetFromOptions(pc));
    PetscCall(PCSetUp(pc));
    PetscCall(PetscOptionsClearValue(NULL, "-pc_hpddm_block_splitting"));
  }
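  /* attach the auxiliary (Neumann) Mat aux, defined on the overlapping subdomain given by is,
     so that PCHPDDM can set up its concurrent generalized eigenproblems (GenEO) */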
  PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
  PetscCall(PCHPDDMHasNeumannMat(pc, PETSC_FALSE)); /* PETSC_TRUE is fine as well, just testing */
  if (share != PETSC_BOOL3_UNKNOWN) PetscCall(PCHPDDMSetSTShareSubKSP(pc, PetscBool3ToBool(share)));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-set_rhs", &flg, NULL));
  if (flg) { /* user-provided RHS for concurrent generalized eigenvalue problems */
    Mat      a, c, P; /* usually assembled automatically in PCHPDDM, this is solely for testing PCHPDDMSetRHSMat() */
    PetscInt rstart, rend, location;

    PetscCall(MatDuplicate(aux, MAT_DO_NOT_COPY_VALUES, &B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
    PetscCall(MatGetDiagonalBlock(A, &a));
    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    PetscCall(ISGetLocalSize(is, &m));
    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, rend - rstart, m, 1, NULL, &P));
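    /* P is a boolean matrix with one entry per row, mapping each locally owned row of A to its
       position in the overlapping subdomain numbering, so that P^T a P pads a with zeros on the overlap */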
    for (m = rstart; m < rend; ++m) {
      PetscCall(ISLocate(is, m, &location));
      PetscCheck(location >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A");
      PetscCall(MatSetValue(P, m - rstart, location, 1.0, INSERT_VALUES));
    }
    PetscCall(MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY));
    PetscCall(PetscObjectTypeCompare((PetscObject)a, MATSEQAIJ, &flg));
    if (flg) PetscCall(MatPtAP(a, P, MAT_INITIAL_MATRIX, 1.0, &X)); // MatPtAP() is used to extend diagonal blocks with zeros on the overlap
    else { // workaround for MatPtAP() limitations with some types
      PetscCall(MatConvert(a, MATSEQAIJ, MAT_INITIAL_MATRIX, &c));
      PetscCall(MatPtAP(c, P, MAT_INITIAL_MATRIX, 1.0, &X));
      PetscCall(MatDestroy(&c));
    }
    PetscCall(MatDestroy(&P));
    PetscCall(MatAXPY(B, 1.0, X, SUBSET_NONZERO_PATTERN));
    PetscCall(MatDestroy(&X));
    PetscCall(MatSetOption(B, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(PCHPDDMSetRHSMat(pc, B));
    PetscCall(MatDestroy(&B));
  }
#else
  (void)share;
#endif
  PetscCall(MatDestroy(&aux));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &flg));
  if (flg) {
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_define_subdomains", &flg, NULL));
    if (flg) {
      IS rows;

      PetscCall(MatGetOwnershipIS(A, &rows, NULL));
      PetscCall(PCASMSetLocalSubdomains(pc, 1, &is, &rows));
      PetscCall(ISDestroy(&rows));
    }
  }
  PetscCall(ISDestroy(&is));
  PetscCall(MatCreateVecs(A, NULL, &b));
  PetscCall(VecSet(b, 1.0));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-transpose", &transpose, NULL));
  if (!transpose) PetscCall(KSPSolve(ksp, b, b));
  else PetscCall(KSPSolveTranspose(ksp, b, b));
  PetscCall(VecGetLocalSize(b, &m));
  PetscCall(VecDestroy(&b));
  if (N > 1) {
    KSPType type;

    PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &B));
    PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &X));
    PetscCall(MatSetRandom(B, NULL));
    /* this is algorithmically optimal in the sense that blocks of vectors are coarsened or interpolated using matrix--matrix operations */
    /* PCHPDDM however heavily relies on MPI[S]BAIJ format for which there is no efficient MatProduct implementation */
    if (!transpose) PetscCall(KSPMatSolve(ksp, B, X));
    else PetscCall(KSPMatSolveTranspose(ksp, B, X));
    PetscCall(KSPGetType(ksp, &type));
    PetscCall(PetscStrcmp(type, KSPHPDDM, &flg));
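    /* sanity check: with a batch size of 1, KSPMatSolve() degenerates into one KSPSolve() per
       right-hand side, so for pseudo-block KSPHPDDMType values both paths must agree to machine precision */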
#if defined(PETSC_HAVE_HPDDM)
    if (flg) {
      PetscReal    norm;
      KSPHPDDMType type;

      PetscCall(KSPHPDDMGetType(ksp, &type));
      if (type == KSP_HPDDM_TYPE_PREONLY || type == KSP_HPDDM_TYPE_CG || type == KSP_HPDDM_TYPE_GMRES || type == KSP_HPDDM_TYPE_GCRODR) {
        Mat C;

        PetscCall(MatDuplicate(X, MAT_DO_NOT_COPY_VALUES, &C));
        PetscCall(KSPSetMatSolveBatchSize(ksp, 1));
        if (!transpose) PetscCall(KSPMatSolve(ksp, B, C));
        else PetscCall(KSPMatSolveTranspose(ksp, B, C));
        PetscCall(MatAYPX(C, -1.0, X, SAME_NONZERO_PATTERN));
        PetscCall(MatNorm(C, NORM_INFINITY, &norm));
        PetscCall(MatDestroy(&C));
        PetscCheck(norm <= 100 * PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "KSPMatSolve%s() and KSPSolve%s() difference has nonzero norm %g with pseudo-block KSPHPDDMType %s", (transpose ? "Transpose" : ""), (transpose ? "Transpose" : ""), (double)norm, KSPHPDDMTypes[type]);
      }
    }
#endif
    PetscCall(MatDestroy(&X));
    PetscCall(MatDestroy(&B));
  }
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  if (flg) PetscCall(PCHPDDMGetSTShareSubKSP(pc, &flg));
#endif
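  /* when subsolver sharing is effective, the symbolic factorization is computed once and then reused,
     so the numerical factorization count must be strictly greater than the symbolic one */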
  if (flg && PetscDefined(USE_LOG)) {
    PetscCall(PetscOptionsHasName(NULL, NULL, "-pc_hpddm_harmonic_overlap", &flg));
    if (!flg) {
      PetscLogEvent      event;
      PetscEventPerfInfo info1, info2;

      PetscCall(PetscLogEventRegister("MatLUFactorSym", PC_CLASSID, &event));
      PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1));
      PetscCall(PetscLogEventRegister("MatLUFactorNum", PC_CLASSID, &event));
      PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2));
      if (!info1.count && !info2.count) {
        PetscCall(PetscLogEventRegister("MatCholFctrSym", PC_CLASSID, &event));
        PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1));
        PetscCall(PetscLogEventRegister("MatCholFctrNum", PC_CLASSID, &event));
        PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2));
        PetscCheck(info2.count > info1.count, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cholesky numerical factorization (%d) not called more times than Cholesky symbolic factorization (%d), broken -pc_hpddm_levels_1_st_share_sub_ksp", info2.count, info1.count);
      } else PetscCheck(info2.count > info1.count, PETSC_COMM_SELF, PETSC_ERR_PLIB, "LU numerical factorization (%d) not called more times than LU symbolic factorization (%d), broken -pc_hpddm_levels_1_st_share_sub_ksp", info2.count, info1.count);
    }
  }
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
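  /* -successive_solves: redo setup and solve twice more and check that convergence behavior
     (reason and iteration count, up to +/- 3 iterations) matches the initial solve */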
  if (N == 1) {
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", &flg, NULL));
    if (flg) {
      KSPConvergedReason reason[2];
      PetscInt           iterations[3];

      PetscCall(KSPGetConvergedReason(ksp, reason));
      PetscCall(KSPGetTotalIterations(ksp, iterations));
      PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
      PetscCall(KSPSetFromOptions(ksp));
      flg = PETSC_FALSE;
      PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_block_splitting", &flg, NULL));
      if (!flg) {
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/sizes_%d.dat", dir, size));
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
        PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
        PetscCall(ISLoad(sizes, viewer));
        PetscCall(ISGetIndices(sizes, &idx));
        PetscCall(MatCreate(PETSC_COMM_WORLD, &X));
        PetscCall(MatSetSizes(X, idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
        PetscCall(MatSetUp(X));
        PetscCall(ISRestoreIndices(sizes, &idx));
        PetscCall(ISDestroy(&sizes));
        PetscCall(PetscViewerDestroy(&viewer));
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/is_%d.dat", dir, size));
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
        PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
        PetscCall(MatGetLayouts(X, &map, NULL));
        PetscCall(ISSetLayout(sizes, map));
        PetscCall(ISLoad(sizes, viewer));
        PetscCall(ISGetLocalSize(sizes, &m));
        PetscCall(ISGetIndices(sizes, &idx));
        PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_COPY_VALUES, &is));
        PetscCall(ISRestoreIndices(sizes, &idx));
        PetscCall(ISDestroy(&sizes));
        PetscCall(ISSetBlockSize(is, 2));
        PetscCall(PetscViewerDestroy(&viewer));
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d.dat", dir, size));
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
        PetscCall(MatLoad(X, viewer));
        PetscCall(PetscViewerDestroy(&viewer));
        PetscCall(MatGetDiagonalBlock(X, &B));
        PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &aux));
        PetscCall(MatDestroy(&X));
        PetscCall(MatSetBlockSizesFromMats(aux, A, A));
        PetscCall(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE));
        PetscCall(MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux));
      }
      PetscCall(MatCreateVecs(A, NULL, &b));
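      /* bumping the PetscObjectState of A marks the operator as modified, forcing a fresh
         PCSetUp() on the next solve even though the matrix entries are unchanged */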
      PetscCall(PetscObjectStateIncrease((PetscObject)A));
      if (!flg) PetscCall(PCHPDDMSetAuxiliaryMat(pc, NULL, aux, NULL, NULL));
      PetscCall(VecSet(b, 1.0));
      if (!transpose) PetscCall(KSPSolve(ksp, b, b));
      else PetscCall(KSPSolveTranspose(ksp, b, b));
      PetscCall(KSPGetConvergedReason(ksp, reason + 1));
      PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
      iterations[1] -= iterations[0];
      PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve%s() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", (transpose ? "Transpose" : ""), KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
      PetscCall(PetscObjectStateIncrease((PetscObject)A));
      if (!flg) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
      PetscCall(PCSetFromOptions(pc));
      PetscCall(VecSet(b, 1.0));
      if (!transpose) PetscCall(KSPSolve(ksp, b, b));
      else PetscCall(KSPSolveTranspose(ksp, b, b));
      PetscCall(KSPGetConvergedReason(ksp, reason + 1));
      PetscCall(KSPGetTotalIterations(ksp, iterations + 2));
      iterations[2] -= iterations[0] + iterations[1];
      PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[2]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve%s() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", (transpose ? "Transpose" : ""), KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[2]);
      PetscCall(VecDestroy(&b));
      PetscCall(ISDestroy(&is));
      PetscCall(MatDestroy(&aux));
    }
  }
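  /* -viewer: dump PCView() output into files named A and A.dat inside a temporary directory
     created on rank 0 and removed once both files have been written */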
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", &flg, NULL));
  if (flg) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
    if (flg) {
      PetscCall(PetscStrncpy(dir, "XXXXXX", sizeof(dir)));
      if (rank == 0) PetscCall(PetscMkdtemp(dir));
      PetscCallMPI(MPI_Bcast(dir, 6, MPI_CHAR, 0, PETSC_COMM_WORLD));
      for (PetscInt i = 0; i < 2; ++i) {
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/%s", dir, i == 0 ? "A" : "A.dat"));
        PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, name, &viewer));
        PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL));
        PetscCall(PCView(pc, viewer));
        PetscCall(PetscViewerPopFormat(viewer));
        PetscCall(PetscViewerDestroy(&viewer));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
      if (rank == 0) PetscCall(PetscRMTree(dir));
    }
  }
#endif
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{bjacobi hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: define_subdomains
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_hpddm_define_subdomains -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
         args: -pc_type {{asm hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -viewer
      test:
         args: -pc_type hpddm -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_sub_pc_type lu -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_coarse_correction none

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
         suffix: geneo
         args: -pc_hpddm_coarse_p {{1 2}shared output} -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev {{5 15}separate output} -mat_type {{aij baij sbaij}shared output}
      test:
         suffix: geneo_block_splitting
         output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-15.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[6-9]/Linear solve converged due to CONVERGED_RTOL iterations 11/g"
         args: -pc_hpddm_coarse_p 2 -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_block_splitting -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_gen_non_hermitian -mat_type {{aij baij}shared output} -successive_solves
      test:
         suffix: geneo_share
         output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
         args: -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_share_sub_ksp -reset {{false true}shared output}
      test:
         suffix: harmonic_overlap_1_define_false
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_pc_type lu -pc_hpddm_define_subdomains false -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_pc_asm_overlap 2 -mat_type baij
      test:
         suffix: harmonic_overlap_1
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_pc_type lu -mat_type baij
      test:
         suffix: harmonic_overlap_1_share_petsc
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -pc_hpddm_levels_1_eps_pc_type lu -mat_type baij
      test:
         requires: mumps
         suffix: harmonic_overlap_1_share_mumps
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_coarse_pc_type cholesky -pc_hpddm_coarse_pc_factor_mat_solver_type mumps -pc_hpddm_coarse_mat_mumps_icntl_15 1
      test:
         requires: mumps
         suffix: harmonic_overlap_1_share_mumps_not_set_explicitly
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type baij
      test:
         requires: mkl_pardiso
         suffix: harmonic_overlap_1_share_mkl_pardiso
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations [12][0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type shell -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mkl_pardiso
      test:
         requires: mkl_pardiso !mumps
         suffix: harmonic_overlap_1_share_mkl_pardiso_no_set_explicitly
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations [12][0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_threshold_relative 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type shell
      test:
         suffix: harmonic_overlap_2_threshold_relative
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 9/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 2 -pc_hpddm_levels_1_svd_nsv 15 -pc_hpddm_levels_1_svd_threshold_relative 1e-1 -pc_hpddm_levels_1_st_share_sub_ksp -mat_type sbaij
      test:
         suffix: harmonic_overlap_2
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 9/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_harmonic_overlap 2 -pc_hpddm_levels_1_svd_nsv 12 -pc_hpddm_levels_1_st_share_sub_ksp -mat_type sbaij

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -ksp_max_it 150 -pc_type hpddm -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_p 1 -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains
      test:
         suffix: geneo_share_cholesky
         output_file: output/ex76_geneo_share.out
         # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
         args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -mat_type {{aij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output} -successive_solves
      test:
         suffix: geneo_share_cholesky_matstructure
         output_file: output/ex76_geneo_share.out
         # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 14/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_levels_1_sub_pc_type cholesky -mat_type {{baij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure same -set_rhs {{false true} shared output}
      test:
         suffix: geneo_transpose
         output_file: output/ex76_geneo_share.out
         filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[234]/Linear solve converged due to CONVERGED_RTOL iterations 15/g" -e "s/Linear solve converged due to CONVERGED_RTOL iterations 26/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
         args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp -successive_solves -transpose -pc_hpddm_coarse_correction {{additive deflated balanced}shared output}
      test:
         requires: mumps
         suffix: geneo_share_lu
         output_file: output/ex76_geneo_share.out
         # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
         args: -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_pc_type lu -mat_type baij -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
      test:
         requires: mumps
         suffix: geneo_share_lu_matstructure
         output_file: output/ex76_geneo_share.out
         # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
         args: -pc_hpddm_levels_1_sub_pc_type lu -mat_type aij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure {{same different}shared output} -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -successive_solves -pc_hpddm_levels_1_eps_target 1e-5
      test:
         suffix: geneo_share_not_asm
         output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
         # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
         args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp true -pc_hpddm_levels_1_pc_type gasm -successive_solves

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: fgmres_geneo_20_p_2
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_pc_type redundant -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -pc_hpddm_log_separate {{false true}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type {{baij sbaij}shared output} -pc_hpddm_levels_2_eps_nev {{5 20}shared output} -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_type gmres -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
         suffix: fgmres_geneo_20_p_2_geneo
         args: -mat_type {{aij sbaij}shared output}
      test:
         suffix: fgmres_geneo_20_p_2_geneo_algebraic
         args: -pc_hpddm_levels_2_st_pc_type mat
   # PCHPDDM + KSPHPDDM test to exercise multilevel + multiple RHS in one go
   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: fgmres_geneo_20_p_2_geneo_rhs
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      # for -pc_hpddm_coarse_correction additive
      filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 37/Linear solve converged due to CONVERGED_RTOL iterations 25/g"
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type baij -pc_hpddm_levels_2_eps_nev 5 -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_max_it 10 -pc_hpddm_levels_2_ksp_type hpddm -pc_hpddm_levels_2_ksp_hpddm_type gmres -ksp_type hpddm -ksp_hpddm_variant flexible -pc_hpddm_coarse_mat_type baij -mat_type aij -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output}

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) mumps defined(PETSC_HAVE_OPENMP_SUPPORT)
      filter: grep -E -e "Linear solve" -e " executing" | sed -e "s/MPI = 1/MPI = 2/g" -e "s/OMP = 1/OMP = 2/g"
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_p {{1 2}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_coarse_pc_factor_mat_solver_type mumps -pc_hpddm_coarse_mat_mumps_icntl_4 2 -pc_hpddm_coarse_mat_mumps_use_omp_threads {{1 2}shared output}
      test:
         suffix: geneo_mumps_use_omp_threads_1
         output_file: output/ex76_geneo_mumps_use_omp_threads.out
         args: -pc_hpddm_coarse_mat_type {{baij sbaij}shared output}
      test:
         suffix: geneo_mumps_use_omp_threads_2
         output_file: output/ex76_geneo_mumps_use_omp_threads.out
         args: -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_1_eps_threshold_absolute 0.4 -pc_hpddm_coarse_pc_type cholesky -pc_hpddm_coarse_mat_filter 1e-12

   testset: # converge really poorly because of a tiny -pc_hpddm_levels_1_eps_threshold_absolute, but needed for proper code coverage where some subdomains don't call EPSSolve()
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_threshold_absolute 0.005 -pc_hpddm_levels_1_eps_use_inertia -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_define_subdomains -pc_hpddm_has_neumann -ksp_rtol 0.9
      filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1/Linear solve converged due to CONVERGED_RTOL iterations 141/g"
      test:
         suffix: inertia_petsc
         output_file: output/ex76_1.out
         args: -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc
      test:
         suffix: inertia_mumps
         output_file: output/ex76_1.out
         requires: mumps

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: reuse_symbolic
      output_file: output/empty.out
      nsize: 4
      args: -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output} -ksp_pc_side {{left right}shared output} -ksp_max_it 20 -ksp_type hpddm -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains -ksp_error_if_not_converged -transpose {{true false} shared output}

TEST*/