/* mpisbaijspooles.c  (petsc-3.3-p7, 2013-05-11) */
/*
   Provides an interface to the Spooles parallel sparse solver (MPI SPOOLES)
*/
#include <../src/mat/impls/aij/seq/spooles/spooles.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
#if !defined(PETSC_USE_COMPLEX)
/*
  input:
    F:                 numeric factor
  output:
    nneg, nzero, npos: global matrix inertia, returned on all processes
*/
PetscErrorCode MatGetInertia_MPISBAIJSpooles(Mat F,int *nneg,int *nzero,int *npos)
{
  Mat_Spooles    *lu = (Mat_Spooles*)F->spptr;
  PetscErrorCode ierr;
  int            neg,zero,pos,sbuf[3],rbuf[3];

  PetscFunctionBegin;
  /* compute the inertia of the locally owned fronts, then sum over the communicator */
  FrontMtx_inertia(lu->frontmtx,&neg,&zero,&pos);
  sbuf[0] = neg; sbuf[1] = zero; sbuf[2] = pos;
  ierr = MPI_Allreduce(sbuf,rbuf,3,MPI_INT,MPI_SUM,((PetscObject)F)->comm);CHKERRQ(ierr);
  *nneg = rbuf[0]; *nzero = rbuf[1]; *npos = rbuf[2];
  PetscFunctionReturn(0);
}
#endif /* !defined(PETSC_USE_COMPLEX) */
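
/*
   A minimal usage sketch (not part of the original file): once a Cholesky
   factorization has been computed with this package, the inertia computed
   above is reachable through the public MatGetInertia() interface. Here F
   is assumed to be the factored matrix returned by MatGetFactor().

     PetscInt nneg,nzero,npos;
     ierr = MatGetInertia(F,&nneg,&nzero,&npos);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"inertia: %D negative, %D zero, %D positive\n",nneg,nzero,npos);CHKERRQ(ierr);
*/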
/* Note: the PETSc permutation r is ignored; Spooles computes its own ordering */
PetscErrorCode MatCholeskyFactorSymbolic_MPISBAIJSpooles(Mat B,Mat A,IS r,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* the symbolic factorization is performed together with the numeric one,
     so here we only install the numeric-factorization routine */
  B->ops->choleskyfactornumeric = MatFactorNumeric_MPISpooles;
  PetscFunctionReturn(0);
}
extern PetscErrorCode MatDestroy_MPISBAIJ(Mat);
PetscErrorCode MatDestroy_MPISBAIJSpooles(Mat A)
{
  Mat_Spooles    *lu = (Mat_Spooles*)A->spptr;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSpooles) {
    /* free the Spooles objects created during factorization */
    FrontMtx_free(lu->frontmtx);
    IV_free(lu->newToOldIV);
    IV_free(lu->oldToNewIV);
    IV_free(lu->vtxmapIV);
    InpMtx_free(lu->mtxA);
    ETree_free(lu->frontETree);
    IVL_free(lu->symbfacIVL);
    SubMtxManager_free(lu->mtxmanager);
    DenseMtx_free(lu->mtxX);
    DenseMtx_free(lu->mtxY);
    /* free the PETSc-side work objects */
    ierr = MPI_Comm_free(&(lu->comm_spooles));CHKERRQ(ierr);
    ierr = VecDestroy(&lu->vec_spooles);CHKERRQ(ierr);
    ierr = ISDestroy(&lu->iden);CHKERRQ(ierr);
    ierr = ISDestroy(&lu->is_petsc);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&lu->scat);CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);
  ierr = MatDestroy_MPISBAIJ(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_BEGIN
PetscErrorCode MatFactorGetSolverPackage_mpisbaij_spooles(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSPOOLES;
  PetscFunctionReturn(0);
}
EXTERN_C_END
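
/*
   Usage sketch (assumption, not in the original file): a caller can query
   which package produced a factored matrix F through the generic
   MatFactorGetSolverPackage() interface; for factors created here it
   returns "spooles".

     const MatSolverPackage pkg;
     ierr = MatFactorGetSolverPackage(F,&pkg);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"factor computed by %s\n",pkg);CHKERRQ(ierr);
*/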
EXTERN_C_BEGIN
PetscErrorCode MatGetFactor_mpisbaij_spooles(Mat A,MatFactorType ftype,Mat *F)
{
  Mat_Spooles    *lu;
  Mat            B;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create the factorization matrix F */
  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscNewLog(B,Mat_Spooles,&lu);CHKERRQ(ierr);
  B->spptr          = lu;
  lu->flg           = DIFFERENT_NONZERO_PATTERN;
  lu->options.useQR = PETSC_FALSE;

  if (ftype == MAT_FACTOR_CHOLESKY) {
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MPISBAIJSpooles;
    B->ops->view                   = MatView_Spooles;
    B->ops->destroy                = MatDestroy_MPISBAIJSpooles;
    ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mpisbaij_spooles",MatFactorGetSolverPackage_mpisbaij_spooles);CHKERRQ(ierr);

    lu->options.symflag      = SPOOLES_SYMMETRIC;
    lu->options.pivotingflag = SPOOLES_NO_PIVOTING;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only Cholesky factorization is supported for SBAIJ matrices; use the AIJ format for LU");

  B->factortype = ftype;
  ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_spooles));CHKERRQ(ierr);
  *F = B;
  PetscFunctionReturn(0);
}
EXTERN_C_END
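
/*
   Usage sketch (assumption, not in the original file): this routine is
   normally reached through the generic factor interface rather than called
   directly; A below is assumed to be an assembled MATMPISBAIJ matrix.

     Mat           F;
     MatFactorInfo info;
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     ierr = MatGetFactor(A,MATSOLVERSPOOLES,MAT_FACTOR_CHOLESKY,&F);CHKERRQ(ierr);
     ierr = MatCholeskyFactorSymbolic(F,A,PETSC_NULL,&info);CHKERRQ(ierr);
     ierr = MatCholeskyFactorNumeric(F,A,&info);CHKERRQ(ierr);

   The permutation argument to MatCholeskyFactorSymbolic() is ignored by this
   package (see the note above MatCholeskyFactorSymbolic_MPISBAIJSpooles).
*/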
/*MC
  MATSOLVERSPOOLES - "spooles" - a matrix type providing direct solvers (LU and Cholesky) for distributed symmetric
  and non-symmetric matrices via the external package Spooles.

  If Spooles is installed (run ./configure with the option --download-spooles), this solver package is available.
  Options Database Keys:
+ -mat_spooles_tau <tau> - upper bound on the magnitude of the largest element in L or U
. -mat_spooles_seed <seed> - random number seed used for ordering
. -mat_spooles_msglvl <msglvl> - message output level
. -mat_spooles_ordering <BestOfNDandMS,MMD,MS,ND> - ordering used
. -mat_spooles_maxdomainsize <n> - maximum subgraph size used by Spooles orderings
. -mat_spooles_maxzeros <n> - maximum number of zeros inside a supernode
. -mat_spooles_maxsize <n> - maximum size of a supernode
. -mat_spooles_FrontMtxInfo <true,false> - print Spooles information about the computed factorization
. -mat_spooles_symmetryflag <0,1,2> - 0: SPOOLES_SYMMETRIC, 1: SPOOLES_HERMITIAN, 2: SPOOLES_NONSYMMETRIC
. -mat_spooles_patchAndGoFlag <0,1,2> - 0: no patch, 1: use PatchAndGo strategy 1, 2: use PatchAndGo strategy 2
. -mat_spooles_toosmall <dt> - drop tolerance for PatchAndGo strategy 1
. -mat_spooles_storeids <bool integer> - if nonzero, store the row and column numbers where patches were applied in an IV object
. -mat_spooles_fudge <delta> - fudge factor for rescaling diagonals with PatchAndGo strategy 2
- -mat_spooles_storevalues <bool integer> - if nonzero and PatchAndGo strategy 2 is used, store the change in diagonal value in a DV object

   Level: beginner

.seealso: MATSOLVERSUPERLU, MATSOLVERMUMPS, MATSOLVERSUPERLU_DIST, PCFactorSetMatSolverPackage(), MatSolverPackage
M*/
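
/*
   Usage sketch (assumption, not in the original file): the options above are
   usually combined with a direct-solve KSP/PC configuration on the command
   line, e.g. for a MATMPISBAIJ system:

     mpiexec -n 4 ./myapp -ksp_type preonly -pc_type cholesky \
                          -pc_factor_mat_solver_package spooles \
                          -mat_spooles_symmetryflag 0 -mat_spooles_msglvl 1
*/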