Actual source code: strumpack.c
petsc-3.10.5 2019-03-28
1: #include <../src/mat/impls/aij/seq/aij.h>
2: #include <../src/mat/impls/aij/mpi/mpiaij.h>
3: #include <StrumpackSparseSolver.h>
7: static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A,Vec v)
8: {
10: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: STRUMPACK factor");
11: return(0);
12: }
16: static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
17: {
18: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)A->spptr;
19: PetscErrorCode ierr;
20: PetscBool flg;
23: /* Deallocate STRUMPACK storage */
24: PetscStackCall("STRUMPACK_destroy",STRUMPACK_destroy(S));
25: PetscFree(A->spptr);
26: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
27: if (flg) {
28: MatDestroy_SeqAIJ(A);
29: } else {
30: MatDestroy_MPIAIJ(A);
31: }
33: /* clear composed functions */
34: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
35: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetReordering_C",NULL);
36: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetColPerm_C",NULL);
37: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSRelTol_C",NULL);
38: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSAbsTol_C",NULL);
39: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSMaxRank_C",NULL);
40: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSLeafSize_C",NULL);
41: PetscObjectComposeFunction((PetscObject)A,"MatSTRUMPACKSetHSSMinSepSize_C",NULL);
43: return(0);
44: }
49: static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F,MatSTRUMPACKReordering reordering)
50: {
51: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
54: PetscStackCall("STRUMPACK_reordering_method",STRUMPACK_set_reordering_method(*S,(STRUMPACK_REORDERING_STRATEGY)reordering));
55: return(0);
56: }
59: /*@
60: MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering
62: Input Parameters:
63: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
64: - reordering - the reordering strategy used to compute the fill-reducing ordering
65: Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5
67: Options Database:
68: . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
70: Level: beginner
72: References:
73: . STRUMPACK manual
75: .seealso: MatGetFactor()
76: @*/
77: PetscErrorCode MatSTRUMPACKSetReordering(Mat F,MatSTRUMPACKReordering reordering)
78: {
84: PetscTryMethod(F,"MatSTRUMPACKSetReordering_C",(Mat,MatSTRUMPACKReordering),(F,reordering));
85: return(0);
86: }
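/* Example (editorial sketch, not part of the original source): select the SCOTCH
   fill-reducing reordering on the factored matrix of a PCLU preconditioner. It
   assumes ksp is a KSP whose operators have already been set, uses the
   MAT_STRUMPACK_* values of the MatSTRUMPACKReordering enum from petscmat.h,
   and omits error checking (ierr/CHKERRQ) for brevity.

     PC  pc;
     Mat F;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverType(pc,MATSOLVERSTRUMPACK);
     PCFactorSetUpMatSolverType(pc);
     PCFactorGetMatrix(pc,&F);
     MatSTRUMPACKSetReordering(F,MAT_STRUMPACK_SCOTCH);
*/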
91: static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F,PetscBool cperm)
92: {
93: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
96: PetscStackCall("STRUMPACK_set_mc64job",STRUMPACK_set_mc64job(*S,cperm ? 5 : 0));
97: return(0);
98: }
101: /*@
102: MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal
103: Logically Collective on Mat
105: Input Parameters:
106: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
107: - cperm - whether or not to permute (internally) the columns of the matrix
109: Options Database:
110: . -mat_strumpack_colperm <TRUE> - Find a column permutation that makes the diagonal nonzero (None)
112: Level: beginner
114: References:
115: . STRUMPACK manual
117: .seealso: MatGetFactor()
118: @*/
119: PetscErrorCode MatSTRUMPACKSetColPerm(Mat F,PetscBool cperm)
120: {
126: PetscTryMethod(F,"MatSTRUMPACKSetColPerm_C",(Mat,PetscBool),(F,cperm));
127: return(0);
128: }
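/* Example (editorial sketch): disable the MC64 column permutation, e.g. for a
   matrix that is known to have a nonzero diagonal. F is the factored matrix
   obtained with PCFactorGetMatrix() as in the MatSTRUMPACKSetReordering sketch above.

     MatSTRUMPACKSetColPerm(F,PETSC_FALSE);

   The equivalent run-time option is -mat_strumpack_colperm 0. */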
132: static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F,PetscReal rtol)
133: {
134: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
137: PetscStackCall("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S,rtol));
138: return(0);
139: }
142: /*@
143: MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression
144: Logically Collective on Mat
146: Input Parameters:
147: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
148: - rtol - relative compression tolerance
150: Options Database:
151: . -mat_strumpack_hss_rel_tol <1e-2> - Relative compression tolerance (None)
153: Level: beginner
155: References:
156: . STRUMPACK manual
158: .seealso: MatGetFactor()
159: @*/
160: PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F,PetscReal rtol)
161: {
167: PetscTryMethod(F,"MatSTRUMPACKSetHSSRelTol_C",(Mat,PetscReal),(F,rtol));
168: return(0);
169: }
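/* Example (editorial sketch): loosen the HSS relative compression tolerance when
   STRUMPACK is used through PCILU as an approximate-factorization preconditioner.
   F is again the factored matrix from PCFactorGetMatrix(); the value 1e-1 is
   illustrative only.

     MatSTRUMPACKSetHSSRelTol(F,1e-1);
*/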
174: static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F,PetscReal atol)
175: {
176: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
179: PetscStackCall("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S,atol));
180: return(0);
181: }
184: /*@
185: MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression
186: Logically Collective on Mat
188: Input Parameters:
189: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
190: - atol - absolute compression tolerance
192: Options Database:
193: . -mat_strumpack_hss_abs_tol <1e-8> - Absolute compression tolerance (None)
195: Level: beginner
197: References:
198: . STRUMPACK manual
200: .seealso: MatGetFactor()
201: @*/
202: PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F,PetscReal atol)
203: {
209: PetscTryMethod(F,"MatSTRUMPACKSetHSSAbsTol_C",(Mat,PetscReal),(F,atol));
210: return(0);
211: }
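/* Example (editorial sketch): the same tolerance can be set from the options
   database instead of through the API. For instance, running a PETSc application
   (here called ./app, a hypothetical name) as

     ./app -pc_type ilu -pc_factor_mat_solver_type strumpack -mat_strumpack_hss_abs_tol 1e-6

   selects STRUMPACK with low-rank compression and an absolute compression
   tolerance of 1e-6 (an illustrative value). */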
216: static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F,PetscInt hssmaxrank)
217: {
218: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
221: PetscStackCall("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S,hssmaxrank));
222: return(0);
223: }
226: /*@
227: MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank
228: Logically Collective on Mat
230: Input Parameters:
231: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
232: - hssmaxrank - maximum rank used in low-rank approximation
234: Options Database:
235: . -mat_strumpack_max_rank - Maximum rank in HSS compression, when using PCILU (None)
237: Level: beginner
239: References:
240: . STRUMPACK manual
242: .seealso: MatGetFactor()
243: @*/
244: PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F,PetscInt hssmaxrank)
245: {
251: PetscTryMethod(F,"MatSTRUMPACKSetHSSMaxRank_C",(Mat,PetscInt),(F,hssmaxrank));
252: return(0);
253: }
258: static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F,PetscInt leaf_size)
259: {
260: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
263: PetscStackCall("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S,leaf_size));
264: return(0);
265: }
268: /*@
269: MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size
270: Logically Collective on Mat
272: Input Parameters:
273: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
274: - leaf_size - Size of diagonal blocks in HSS approximation
276: Options Database:
277: . -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using PCILU (None)
279: Level: beginner
281: References:
282: . STRUMPACK manual
284: .seealso: MatGetFactor()
285: @*/
286: PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F,PetscInt leaf_size)
287: {
293: PetscTryMethod(F,"MatSTRUMPACKSetHSSLeafSize_C",(Mat,PetscInt),(F,leaf_size));
294: return(0);
295: }
301: static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F,PetscInt hssminsize)
302: {
303: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
306: PetscStackCall("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S,hssminsize));
307: return(0);
308: }
311: /*@
312: MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation
313: Logically Collective on Mat
315: Input Parameters:
316: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-STRUMPACK interface
317: - hssminsize - minimum separator size for which HSS compression is applied
319: Options Database:
320: . -mat_strumpack_hss_min_sep_size <256> - Minimum size of separator for HSS compression (None)
322: Level: beginner
324: References:
325: . STRUMPACK manual
327: .seealso: MatGetFactor()
328: @*/
329: PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F,PetscInt hssminsize)
330: {
336: PetscTryMethod(F,"MatSTRUMPACKSetHSSMinSepSize_C",(Mat,PetscInt),(F,hssminsize));
337: return(0);
338: }
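/* Example (editorial sketch): tune several HSS parameters together on the factored
   matrix F of a PCILU preconditioner (obtained as in the earlier sketches). The
   numbers are illustrative only, not recommended defaults.

     MatSTRUMPACKSetHSSMinSepSize(F,512);
     MatSTRUMPACKSetHSSMaxRank(F,1000);
     MatSTRUMPACKSetHSSLeafSize(F,64);
*/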
343: static PetscErrorCode MatSolve_STRUMPACK(Mat A,Vec b_mpi,Vec x)
344: {
345: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)A->spptr;
346: STRUMPACK_RETURN_CODE sp_err;
347: PetscErrorCode ierr;
348: const PetscScalar *bptr;
349: PetscScalar *xptr;
352: VecGetArray(x,&xptr);
353: VecGetArrayRead(b_mpi,&bptr);
355: PetscStackCall("STRUMPACK_solve",sp_err = STRUMPACK_solve(*S,(PetscScalar*)bptr,xptr,0));
356: switch (sp_err) {
357: case STRUMPACK_SUCCESS: break;
358: case STRUMPACK_MATRIX_NOT_SET: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix was not set"); break; }
359: case STRUMPACK_REORDERING_ERROR: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix reordering failed"); break; }
360: default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: solve failed");
361: }
362: VecRestoreArray(x,&xptr);
363: VecRestoreArrayRead(b_mpi,&bptr);
364: return(0);
365: }
369: static PetscErrorCode MatMatSolve_STRUMPACK(Mat A,Mat B_mpi,Mat X)
370: {
371: PetscErrorCode ierr;
372: PetscBool flg;
375: PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
376: if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
377: PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
378: if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");
379: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatMatSolve_STRUMPACK() is not implemented yet");
380: return(0);
381: }
385: static PetscErrorCode MatView_Info_STRUMPACK(Mat A,PetscViewer viewer)
386: {
387: PetscErrorCode ierr;
390: /* check if matrix is strumpack type */
391: if (A->ops->solve != MatSolve_STRUMPACK) return(0);
392: PetscViewerASCIIPrintf(viewer,"STRUMPACK sparse solver!\n");
393: return(0);
394: }
398: static PetscErrorCode MatView_STRUMPACK(Mat A,PetscViewer viewer)
399: {
400: PetscErrorCode ierr;
401: PetscBool iascii;
402: PetscViewerFormat format;
405: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
406: if (iascii) {
407: PetscViewerGetFormat(viewer,&format);
408: if (format == PETSC_VIEWER_ASCII_INFO) {
409: MatView_Info_STRUMPACK(A,viewer);
410: }
411: }
412: return(0);
413: }
417: static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F,Mat A,const MatFactorInfo *info)
418: {
419: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver*)F->spptr;
420: STRUMPACK_RETURN_CODE sp_err;
421: Mat_SeqAIJ *A_d,*A_o;
422: Mat_MPIAIJ *mat;
423: PetscErrorCode ierr;
424: PetscInt M=A->rmap->N,m=A->rmap->n;
425: PetscBool flg;
428: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
429: if (flg) { /* A is MATMPIAIJ */
430: mat = (Mat_MPIAIJ*)A->data;
431: A_d = (Mat_SeqAIJ*)(mat->A)->data;
432: A_o = (Mat_SeqAIJ*)(mat->B)->data;
433: PetscStackCall("STRUMPACK_set_MPIAIJ_matrix",STRUMPACK_set_MPIAIJ_matrix(*S,&m,A_d->i,A_d->j,A_d->a,A_o->i,A_o->j,A_o->a,mat->garray));
434: } else { /* A is MATSEQAIJ */
435: A_d = (Mat_SeqAIJ*)A->data;
436: PetscStackCall("STRUMPACK_set_csr_matrix",STRUMPACK_set_csr_matrix(*S,&M,A_d->i,A_d->j,A_d->a,0));
437: }
439: /* Reorder and Factor the matrix. */
440: /* TODO: figure out how to avoid the reordering step when only the matrix values change but the sparsity pattern stays the same. */
441: PetscStackCall("STRUMPACK_reorder",sp_err = STRUMPACK_reorder(*S));
442: PetscStackCall("STRUMPACK_factor",sp_err = STRUMPACK_factor(*S));
443: switch (sp_err) {
444: case STRUMPACK_SUCCESS: break;
445: case STRUMPACK_MATRIX_NOT_SET: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix was not set"); break; }
446: case STRUMPACK_REORDERING_ERROR: { SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: matrix reordering failed"); break; }
447: default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"STRUMPACK error: factorization failed");
448: }
449: F->assembled = PETSC_TRUE;
450: F->preallocated = PETSC_TRUE;
451: return(0);
452: }
456: static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
457: {
459: F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
460: F->ops->solve = MatSolve_STRUMPACK;
461: F->ops->matsolve = MatMatSolve_STRUMPACK;
462: return(0);
463: }
467: static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A,MatSolverType *type)
468: {
470: *type = MATSOLVERSTRUMPACK;
471: return(0);
472: }
474: /*MC
475: MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
476: and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.
478: Consult the STRUMPACK-sparse manual for more info.
480: Use
481: ./configure --download-strumpack
482: to have PETSc installed with STRUMPACK
484: Use
485: -pc_type lu -pc_factor_mat_solver_type strumpack
486: to use this as an exact (direct) solver, use
487: -pc_type ilu -pc_factor_mat_solver_type strumpack
488: to enable low-rank compression (i.e., use it as a preconditioner).
490: Works with AIJ matrices
492: Options Database Keys:
493: + -mat_strumpack_verbose - Print STRUMPACK information (None)
494: . -mat_strumpack_hss_rel_tol <1e-2> - Relative compression tolerance (None)
495: . -mat_strumpack_hss_abs_tol <1e-8> - Absolute compression tolerance (None)
496: . -mat_strumpack_colperm <TRUE> - Permute the matrix columns to get a nonzero diagonal (None)
497: . -mat_strumpack_hss_min_sep_size <256> - Minimum size of separator for HSS compression (None)
498: . -mat_strumpack_max_rank - Maximum rank in HSS compression, when using PCILU (None)
499: . -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using PCILU (None)
500: . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
501: - -mat_strumpack_iterative_solver <DIRECT> - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None)
503: Level: beginner
505: .seealso: PCLU, PCILU, MATSOLVERSUPERLU_DIST, MATSOLVERMUMPS, PCFactorSetMatSolverType(), MatSolverType
506: M*/
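/* Example usage (editorial sketch, not part of the original source): solve A x = b
   with STRUMPACK as the direct LU solver inside a KSP, the programmatic equivalent
   of -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type strumpack. A is an
   assembled AIJ matrix, b and x are matching vectors; error checking is omitted.

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPSetType(ksp,KSPPREONLY);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverType(pc,MATSOLVERSTRUMPACK);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/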
509: static PetscErrorCode MatGetFactor_aij_strumpack(Mat A,MatFactorType ftype,Mat *F)
510: {
511: Mat B;
512: PetscErrorCode ierr;
513: PetscInt M=A->rmap->N,N=A->cmap->N;
514: PetscBool verb,flg,set;
515: PetscReal ctol;
516: PetscInt hssminsize,max_rank,leaf_size;
517: STRUMPACK_SparseSolver *S;
518: STRUMPACK_INTERFACE iface;
519: STRUMPACK_REORDERING_STRATEGY ndcurrent,ndvalue;
520: STRUMPACK_KRYLOV_SOLVER itcurrent,itsolver;
521: const STRUMPACK_PRECISION table[2][2][2] =
522: {{{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64},
523: {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
524: {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},
525: {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}}};
526: const STRUMPACK_PRECISION prec =
527: table[(sizeof(PetscInt)==8)?0:1]
528: [(PETSC_SCALAR==PETSC_COMPLEX)?0:1]
529: [(PETSC_REAL==PETSC_FLOAT)?0:1];
530: const char *const STRUMPACKNDTypes[] =
531: {"NATURAL","METIS","PARMETIS","SCOTCH","PTSCOTCH","RCM","STRUMPACKNDTypes","",0};
532: const char *const SolverTypes[] =
533: {"AUTO","NONE","REFINE","PREC_GMRES","GMRES","PREC_BICGSTAB","BICGSTAB","SolverTypes","",0};
536: /* Create the factorization matrix */
537: MatCreate(PetscObjectComm((PetscObject)A),&B);
538: MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
539: MatSetType(B,((PetscObject)A)->type_name);
540: MatSeqAIJSetPreallocation(B,0,NULL);
541: MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);
542: if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
543: B->ops->lufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
544: B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
545: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported");
546: B->ops->view = MatView_STRUMPACK;
547: B->ops->destroy = MatDestroy_STRUMPACK;
548: B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
549: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_strumpack);
550: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetReordering_C",MatSTRUMPACKSetReordering_STRUMPACK);
551: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetColPerm_C",MatSTRUMPACKSetColPerm_STRUMPACK);
552: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSRelTol_C",MatSTRUMPACKSetHSSRelTol_STRUMPACK);
553: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSAbsTol_C",MatSTRUMPACKSetHSSAbsTol_STRUMPACK);
554: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSMaxRank_C",MatSTRUMPACKSetHSSMaxRank_STRUMPACK);
555: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSLeafSize_C",MatSTRUMPACKSetHSSLeafSize_STRUMPACK);
556: PetscObjectComposeFunction((PetscObject)B,"MatSTRUMPACKSetHSSMinSepSize_C",MatSTRUMPACKSetHSSMinSepSize_STRUMPACK);
557: B->factortype = ftype;
558: PetscNewLog(B,&S);
559: B->spptr = S;
561: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
562: iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;
564: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"STRUMPACK Options","Mat");
566: verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
567: PetscOptionsBool("-mat_strumpack_verbose","Print STRUMPACK information","None",verb,&verb,NULL);
569: PetscStackCall("STRUMPACK_init",STRUMPACK_init(S,PetscObjectComm((PetscObject)A),prec,iface,0,NULL,verb));
571: PetscStackCall("STRUMPACK_HSS_rel_tol",ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S));
572: PetscOptionsReal("-mat_strumpack_hss_rel_tol","Relative compression tolerance","None",ctol,&ctol,&set);
573: if (set) PetscStackCall("STRUMPACK_set_HSS_rel_tol",STRUMPACK_set_HSS_rel_tol(*S,(double)ctol));
575: PetscStackCall("STRUMPACK_HSS_abs_tol",ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S));
576: PetscOptionsReal("-mat_strumpack_hss_abs_tol","Absolute compression tolerance","None",ctol,&ctol,&set);
577: if (set) PetscStackCall("STRUMPACK_set_HSS_abs_tol",STRUMPACK_set_HSS_abs_tol(*S,(double)ctol));
579: PetscStackCall("STRUMPACK_mc64job",flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
580: PetscOptionsBool("-mat_strumpack_colperm","Find a col perm to get nonzero diagonal","None",flg,&flg,&set);
581: if (set) PetscStackCall("STRUMPACK_set_mc64job",STRUMPACK_set_mc64job(*S,flg ? 5 : 0));
583: PetscStackCall("STRUMPACK_HSS_min_sep_size",hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S));
584: PetscOptionsInt("-mat_strumpack_hss_min_sep_size","Minimum size of separator for HSS compression","None",hssminsize,&hssminsize,&set);
585: if (set) PetscStackCall("STRUMPACK_set_HSS_min_sep_size",STRUMPACK_set_HSS_min_sep_size(*S,(int)hssminsize));
587: PetscStackCall("STRUMPACK_HSS_max_rank",max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S));
588: PetscOptionsInt("-mat_strumpack_max_rank","Maximum rank in HSS approximation","None",max_rank,&max_rank,&set);
589: if (set) PetscStackCall("STRUMPACK_set_HSS_max_rank",STRUMPACK_set_HSS_max_rank(*S,(int)max_rank));
591: PetscStackCall("STRUMPACK_HSS_leaf_size",leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S));
592: PetscOptionsInt("-mat_strumpack_leaf_size","Size of diagonal blocks in HSS approximation","None",leaf_size,&leaf_size,&set);
593: if (set) PetscStackCall("STRUMPACK_set_HSS_leaf_size",STRUMPACK_set_HSS_leaf_size(*S,(int)leaf_size));
595: PetscStackCall("STRUMPACK_reordering_method",ndcurrent = STRUMPACK_reordering_method(*S));
596: PetscOptionsEnum("-mat_strumpack_reordering","Sparsity reducing matrix reordering","None",STRUMPACKNDTypes,(PetscEnum)ndcurrent,(PetscEnum*)&ndvalue,&set);
597: if (set) PetscStackCall("STRUMPACK_set_reordering_method",STRUMPACK_set_reordering_method(*S,ndvalue));
599: if (ftype == MAT_FACTOR_ILU) {
600: /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete */
601: /* (or approximate) LU factorization. */
602: PetscStackCall("STRUMPACK_enable_HSS",STRUMPACK_enable_HSS(*S));
603: }
605: /* Disable the outer iterative solver from STRUMPACK. */
606: /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement. */
607: /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling */
608: /* low-rank compression), it will use its own preconditioned GMRES. Here we can disable */
609: /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP. */
610: PetscStackCall("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));
612: PetscStackCall("STRUMPACK_Krylov_solver",itcurrent = STRUMPACK_Krylov_solver(*S));
613: PetscOptionsEnum("-mat_strumpack_iterative_solver","Select iterative solver from STRUMPACK","None",SolverTypes,(PetscEnum)itcurrent,(PetscEnum*)&itsolver,&set);
614: if (set) PetscStackCall("STRUMPACK_set_Krylov_solver",STRUMPACK_set_Krylov_solver(*S,itsolver));
616: PetscOptionsEnd();
618: *F = B;
619: return(0);
620: }
624: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
625: {
629: MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_strumpack);
630: MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_strumpack);
631: MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATMPIAIJ,MAT_FACTOR_ILU,MatGetFactor_aij_strumpack);
632: MatSolverTypeRegister(MATSOLVERSTRUMPACK,MATSEQAIJ,MAT_FACTOR_ILU,MatGetFactor_aij_strumpack);
633: return(0);
634: }