Actual source code: ex76.c

#include <petscksp.h>

static char help[] = "Solves a linear system using PCHPDDM.\n\n";

int main(int argc, char **args)
{
  Vec             b;            /* computed solution and right-hand side */
  Mat             A, aux, X, B; /* linear system matrix, local auxiliary (Neumann) matrix, and dense right-hand side/solution matrices */
  KSP             ksp;          /* linear solver context */
  PC              pc;
  IS              is, sizes;
  const PetscInt *idx;
  PetscMPIInt     rank, size;
  PetscInt        m, N = 1;
  const char     *deft = MATAIJ;
  PetscViewer     viewer;
  char            dir[PETSC_MAX_PATH_LEN], name[PETSC_MAX_PATH_LEN], type[256];
  PetscBool       flg;
#if defined(PETSC_USE_LOG)
  PetscLogEvent event;
#endif
  PetscEventPerfInfo info1, info2;

  PetscInitialize(&argc, &args, NULL, help);
  PetscLogDefaultBegin();
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  PetscOptionsGetInt(NULL, NULL, "-rhs", &N, NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  MatCreate(PETSC_COMM_WORLD, &A);
  MatCreate(PETSC_COMM_SELF, &aux);
  ISCreate(PETSC_COMM_SELF, &is);
  PetscStrcpy(dir, ".");
  PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL);
  /* loading matrices */
  PetscSNPrintf(name, sizeof(name), "%s/sizes_%d_%d.dat", dir, rank, size);
  PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer);
  ISCreate(PETSC_COMM_SELF, &sizes);
  ISLoad(sizes, viewer);
  ISGetIndices(sizes, &idx);
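  /* the four entries of the sizes IS give the local row and column counts followed by the global dimensions of A, in the order expected by MatSetSizes() */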
  MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]);
  ISRestoreIndices(sizes, &idx);
  ISDestroy(&sizes);
  PetscViewerDestroy(&viewer);
  MatSetUp(A);
  PetscSNPrintf(name, sizeof(name), "%s/A.dat", dir);
  PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer);
  MatLoad(A, viewer);
  PetscViewerDestroy(&viewer);
  PetscSNPrintf(name, sizeof(name), "%s/is_%d_%d.dat", dir, rank, size);
  PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer);
  ISLoad(is, viewer);
  PetscViewerDestroy(&viewer);
  PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d_%d.dat", dir, rank, size);
  PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer);
  MatLoad(aux, viewer);
  PetscViewerDestroy(&viewer);
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_levels_1_st_share_sub_ksp", &flg, NULL);
  if (flg) { /* PETSc LU/Cholesky struggles numerically for bs > 1,                */
             /* so only set the proper bs for the geneo_share_* tests, 1 otherwise */
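    /* MatSetBlockSizesFromMats() copies the row and column block sizes of A onto aux */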
    MatSetBlockSizesFromMats(aux, A, A);
  }
  MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE);
  MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE);
  /* ready for testing */
  PetscOptionsBegin(PETSC_COMM_WORLD, "", "", "");
  PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, deft, type, 256, &flg);
  PetscOptionsEnd();
  if (flg) {
    MatConvert(A, type, MAT_INPLACE_MATRIX, &A);
    MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux);
  }
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetOperators(ksp, A, A);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCHPDDM);
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_block_splitting", &flg, NULL);
  if (!flg) {
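    /* hand PCHPDDM the overlapping subdomain numbering (is) and the local Neumann matrix (aux) from which the GenEO eigenproblems are set up */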
    PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL);
    PCHPDDMHasNeumannMat(pc, PETSC_FALSE); /* PETSC_TRUE is fine as well, just testing */
  }
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-set_rhs", &flg, NULL);
  if (flg) {          /* user-provided RHS for concurrent generalized eigenvalue problems                           */
    Mat      a, c, P; /* usually assembled automatically in PCHPDDM, this is solely for testing PCHPDDMSetRHSMat() */
    PetscInt rstart, rend, location;
    MatDuplicate(aux, MAT_DO_NOT_COPY_VALUES, &B); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
    MatGetDiagonalBlock(A, &a);
    MatGetOwnershipRange(A, &rstart, &rend);
    ISGetLocalSize(is, &m);
    MatCreateSeqAIJ(PETSC_COMM_SELF, rend - rstart, m, 1, NULL, &P);
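    /* P is a boolean matrix with a single 1 per row, mapping each locally owned row of A to its position in the overlapping subdomain numbering */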
    for (m = rstart; m < rend; ++m) {
      ISLocate(is, m, &location);
      PetscCheck(location >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A"); /* ISLocate() returns a negative value when the entry is not found */
      MatSetValue(P, m - rstart, location, 1.0, INSERT_VALUES);
    }
    MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY);
    PetscObjectTypeCompare((PetscObject)a, MATSEQAIJ, &flg);
    if (flg) MatPtAP(a, P, MAT_INITIAL_MATRIX, 1.0, &X); /* MatPtAP() is used to extend diagonal blocks with zeros on the overlap */
    else { /* workaround for MatPtAP() limitations with some types */
      MatConvert(a, MATSEQAIJ, MAT_INITIAL_MATRIX, &c);
      MatPtAP(c, P, MAT_INITIAL_MATRIX, 1.0, &X);
      MatDestroy(&c);
    }
    MatDestroy(&P);
    MatAXPY(B, 1.0, X, SUBSET_NONZERO_PATTERN);
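    /* B now holds the diagonal block of A extended by zeros on the overlap, stored in the nonzero pattern of aux */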
    MatDestroy(&X);
    MatSetOption(B, MAT_SYMMETRIC, PETSC_TRUE);
    PCHPDDMSetRHSMat(pc, B);
    MatDestroy(&B);
  }
#endif
  MatDestroy(&aux);
  KSPSetFromOptions(ksp);
  PetscObjectTypeCompare((PetscObject)pc, PCASM, &flg); /* the PC type may have been overridden at the command line, e.g., with -pc_type asm */
  if (flg) {
    flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_define_subdomains", &flg, NULL);
    if (flg) {
      IS rows;
      MatGetOwnershipIS(A, &rows, NULL);
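      /* reuse the overlapping IS from disk as the PCASM subdomain, with the locally owned rows as its non-overlapping counterpart */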
      PCASMSetLocalSubdomains(pc, 1, &is, &rows);
      ISDestroy(&rows);
    }
  }
  ISDestroy(&is);
  MatCreateVecs(A, NULL, &b);
  VecSet(b, 1.0);
  KSPSolve(ksp, b, b);    /* in-place solve: b is both the right-hand side and the solution */
  VecGetLocalSize(b, &m); /* local size reused below to create the dense right-hand side matrix */
  VecDestroy(&b);
  if (N > 1) {
    KSPType type;
    PetscOptionsClearValue(NULL, "-ksp_converged_reason");
    KSPSetFromOptions(ksp);
    MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &B);
    MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &X);
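    /* B holds the N random right-hand sides and X the corresponding solutions, both distributed row-wise like A */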
    MatSetRandom(B, NULL);
    /* this is algorithmically optimal in the sense that blocks of vectors are coarsened or interpolated using matrix-matrix operations */
    /* PCHPDDM, however, heavily relies on the MPI[S]BAIJ formats, for which there is no efficient MatProduct implementation */
    KSPMatSolve(ksp, B, X);
    KSPGetType(ksp, &type);
    PetscStrcmp(type, KSPHPDDM, &flg);
#if defined(PETSC_HAVE_HPDDM)
    if (flg) {
      PetscReal    norm;
      KSPHPDDMType type;
      KSPHPDDMGetType(ksp, &type);
      if (type == KSP_HPDDM_TYPE_PREONLY || type == KSP_HPDDM_TYPE_CG || type == KSP_HPDDM_TYPE_GMRES || type == KSP_HPDDM_TYPE_GCRODR) {
        Mat C;
        MatDuplicate(X, MAT_DO_NOT_COPY_VALUES, &C);
        KSPSetMatSolveBatchSize(ksp, 1);
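        /* with a batch size of 1, KSPMatSolve() processes the right-hand sides one column at a time, so C can be checked against the blocked solve X */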
        KSPMatSolve(ksp, B, C);
        MatAYPX(C, -1.0, X, SAME_NONZERO_PATTERN);
        MatNorm(C, NORM_INFINITY, &norm);
        MatDestroy(&C);
        /* the batched and unbatched solves must match to machine precision */
        PetscCheck(norm < 100 * PETSC_MACHINE_EPSILON, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "KSPMatSolve() with and without batching differ by %g", (double)norm);
      }
    }
#endif
    MatDestroy(&X);
    MatDestroy(&B);
  }
  PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg);
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  if (flg) PCHPDDMGetSTShareSubKSP(pc, &flg);
#endif
  if (flg && PetscDefined(USE_LOG)) { /* query the logged factorization events to check that the eigensolver and the subdomain solver share their factorizations */
    PetscLogEventRegister("MatLUFactorSym", PC_CLASSID, &event);
    PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1);
    PetscLogEventRegister("MatLUFactorNum", PC_CLASSID, &event);
    PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2);
    if (!info1.count && !info2.count) { /* no LU factorization was logged, look at Cholesky instead */
      PetscLogEventRegister("MatCholFctrSym", PC_CLASSID, &event);
      PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1);
      PetscLogEventRegister("MatCholFctrNum", PC_CLASSID, &event);
      PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2);
    }
  }
  KSPDestroy(&ksp);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

/*TEST
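   # in the args below, {{a b}shared output} runs the test once per listed value and compares every run against a single output file,
   # while {{a b}separate output} keeps one output file per value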

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{bjacobi hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: define_subdomains
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{asm hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -pc_hpddm_define_subdomains -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
        suffix: geneo
        args: -pc_hpddm_coarse_p {{1 2}shared output} -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev {{5 15}separate output} -mat_type {{aij baij sbaij}shared output}
      test:
        suffix: geneo_block_splitting
        output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-15.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[6-9]/Linear solve converged due to CONVERGED_RTOL iterations 11/g"
        args: -pc_hpddm_coarse_p 2 -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_block_splitting -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_gen_non_hermitian -mat_type {{aij baij}shared output}
      test:
        suffix: geneo_share
        output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
        args: -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_share_sub_ksp

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -ksp_max_it 150 -pc_type hpddm -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_p 1 -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains
      test:
        suffix: geneo_share_cholesky
        output_file: output/ex76_geneo_share.out
        # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
        args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -mat_type {{aij baij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
      test:
        suffix: geneo_share_cholesky_matstructure
        output_file: output/ex76_geneo_share.out
        # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
        args: -pc_hpddm_levels_1_sub_pc_type cholesky -mat_type {{baij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure same -set_rhs {{false true}shared output}
      test:
        requires: mumps
        suffix: geneo_share_lu
        output_file: output/ex76_geneo_share.out
        # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
        args: -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_pc_type lu -mat_type baij -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
      test:
        requires: mumps
        suffix: geneo_share_lu_matstructure
        output_file: output/ex76_geneo_share.out
        # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
        args: -pc_hpddm_levels_1_sub_pc_type lu -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure {{same different}shared output} -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps
      test:
        suffix: geneo_share_not_asm
        output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
        # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
        args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp true -pc_hpddm_levels_1_pc_type gasm

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: fgmres_geneo_20_p_2
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_pc_type redundant -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -pc_hpddm_log_separate {{false true}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type {{baij sbaij}shared output} -pc_hpddm_levels_2_eps_nev {{5 20}shared output} -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_type gmres -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
        suffix: fgmres_geneo_20_p_2_geneo
        args: -mat_type {{aij sbaij}shared output}
      test:
        suffix: fgmres_geneo_20_p_2_geneo_algebraic
        args: -pc_hpddm_levels_2_st_pc_type mat
   # PCHPDDM + KSPHPDDM test to exercise multilevel + multiple RHS in one go
   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: fgmres_geneo_20_p_2_geneo_rhs
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      # for -pc_hpddm_coarse_correction additive
      filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 37/Linear solve converged due to CONVERGED_RTOL iterations 25/g"
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type baij -pc_hpddm_levels_2_eps_nev 5 -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_max_it 10 -pc_hpddm_levels_2_ksp_type hpddm -pc_hpddm_levels_2_ksp_hpddm_type gmres -ksp_type hpddm -ksp_hpddm_variant flexible -pc_hpddm_coarse_mat_type baij -mat_type aij -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output}

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) mumps defined(PETSC_HAVE_OPENMP_SUPPORT)
      filter: grep -E -e "Linear solve" -e "      executing" | sed -e "s/MPI =      1/MPI =      2/g" -e "s/OMP =      1/OMP =      2/g"
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_p {{1 2}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_coarse_pc_factor_mat_solver_type mumps -pc_hpddm_coarse_mat_mumps_icntl_4 2 -pc_hpddm_coarse_mat_mumps_use_omp_threads {{1 2}shared output}
      test:
        suffix: geneo_mumps_use_omp_threads_1
        output_file: output/ex76_geneo_mumps_use_omp_threads.out
        args: -pc_hpddm_coarse_mat_type {{baij sbaij}shared output}
      test:
        suffix: geneo_mumps_use_omp_threads_2
        output_file: output/ex76_geneo_mumps_use_omp_threads.out
        args: -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_1_eps_threshold 0.3 -pc_hpddm_coarse_pc_type cholesky

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: reuse_symbolic
      output_file: output/ex77_preonly.out
      nsize: 4
      args: -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output} -ksp_pc_side {{left right}shared output} -ksp_max_it 20 -ksp_type hpddm -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains -ksp_error_if_not_converged

TEST*/