Actual source code: clique.cxx
petsc-3.6.1 2015-08-06
1: #include <../src/mat/impls/aij/mpi/clique/matcliqueimpl.h> /*I "petscmat.h" I*/
2: /*
3: Provides an interface to the Clique sparse solver (http://poulson.github.com/Clique/)
4: */
/*
  PetscCliqueFinalizePackage - Shuts the Clique library down.

  Registered with PetscRegisterFinalize() by PetscCliqueInitializePackage(),
  so it runs automatically during PetscFinalize().
*/
PetscErrorCode PetscCliqueFinalizePackage(void)
{
  cliq::Finalize();
  return(0);
}
17: PetscErrorCode PetscCliqueInitializePackage(void)
18: {
22: if (cliq::Initialized()) return(0);
23: { /* We have already initialized MPI, so this song and dance is just to pass these variables (which won't be used by Clique) through the interface that needs references */
24: int zero = 0;
25: char **nothing = 0;
26: cliq::Initialize(zero,nothing);
27: }
28: PetscRegisterFinalize(PetscCliqueFinalizePackage);
29: return(0);
30: }
32: /*
33: MatConvertToClique: Convert Petsc aij matrix to Clique matrix
35: input:
36: + A - matrix in seqaij or mpiaij format
37: - reuse - denotes if the destination matrix is to be created or reused. Currently
38: MAT_REUSE_MATRIX is only supported for inplace conversion, otherwise use MAT_INITIAL_MATRIX.
40: output:
41: . cliq - Clique context
42: */
/*
  MatConvertToClique - Copies a PETSc AIJ matrix into the Clique
  DistSparseMatrix held inside the Mat_Clique context.

  With MAT_INITIAL_MATRIX a new DistSparseMatrix of global size A->rmap->N is
  allocated on cliq->cliq_comm and stored in cliq->cmat; with MAT_REUSE_MATRIX
  the existing cliq->cmat is refilled in place.

  Errors (PETSC_ERR_ARG_WRONG) if Clique's row distribution does not coincide
  with PETSc's ownership range on this rank.
*/
PetscErrorCode MatConvertToClique(Mat A,MatReuse reuse,Mat_Clique *cliq)
{
  PetscErrorCode ierr;
  PetscInt i,j,rstart,rend,ncols;
  const PetscInt *cols;
  /* NOTE(review): MatGetRow() yields const PetscScalar*; declaring vals as
     PetscCliqScalar assumes the two scalar types are identical — confirm. */
  const PetscCliqScalar *vals;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat;

  if (reuse == MAT_INITIAL_MATRIX){
    /* create Clique matrix */
    cmat = new cliq::DistSparseMatrix<PetscCliqScalar>(A->rmap->N,cliq->cliq_comm);
    cliq->cmat = cmat;
  } else {
    cmat = cliq->cmat;
  }
  /* fill matrix values */
  MatGetOwnershipRange(A,&rstart,&rend);
  const int firstLocalRow = cmat->FirstLocalRow();
  const int localHeight = cmat->LocalHeight();
  /* Both sides must agree on which rows this rank owns before assembling */
  if (rstart != firstLocalRow || rend-rstart != localHeight) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"matrix rowblock distribution does not match");

  cmat->StartAssembly();
  //cmat->Reserve( 7*localHeight ); ???
  /* Stream every locally-owned entry of A into the Clique matrix */
  for (i=rstart; i<rend; i++){
    MatGetRow(A,i,&ncols,&cols,&vals);
    for (j=0; j<ncols; j++){
      cmat->Update(i,cols[j],vals[j]);
    }
    MatRestoreRow(A,i,&ncols,&cols,&vals);
  }
  cmat->StopAssembly();
  return(0);
}
/*
  MatMult_Clique - Computes Y = A*X using Clique's distributed sparse multiply.

  X is copied element-wise into a Clique DistMultiVec, multiplied
  (yc = 1.0*cmat*xc + 0.0*yc), and the product is copied back into Y.

  NOTE(review): both copy loops iterate over A->cmap->n (local columns).  For
  the output loop one would expect A->rmap->n (local rows); the two agree only
  for square matrices with matching layouts — confirm intent.  The SetLocal()
  calls also presume Clique's DistMultiVec distribution matches PETSc's local
  ownership — verify.
*/
static PetscErrorCode MatMult_Clique(Mat A,Vec X,Vec Y)
{
  PetscErrorCode ierr;
  PetscInt i;
  const PetscCliqScalar *x;
  Mat_Clique *cliq=(Mat_Clique*)A->spptr;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat=cliq->cmat;
  cliq::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  if (!cmat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Clique matrix cmat is not created yet");
  VecGetArrayRead(X,(const PetscScalar **)&x);
  /* Stage the input in a Clique distributed multivector */
  cliq::DistMultiVec<PetscCliqScalar> xc(A->cmap->N,1,cxxcomm);
  cliq::DistMultiVec<PetscCliqScalar> yc(A->rmap->N,1,cxxcomm);
  for (i=0; i<A->cmap->n; i++) {
    xc.SetLocal(i,0,x[i]);
  }
  cliq::Multiply(1.0,*cmat,xc,0.0,yc);
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  /* Copy the product back into the PETSc vector */
  for (i=0; i<A->cmap->n; i++) {
    VecSetValueLocal(Y,i,yc.GetLocal(i,0),INSERT_VALUES);
  }
  VecAssemblyBegin(Y);
  VecAssemblyEnd(Y);
  return(0);
}
/*
  MatView_Clique - ASCII viewer for a Clique-factored matrix.

  With PETSC_VIEWER_ASCII_INFO it prints a parameter banner only; with
  PETSC_VIEWER_DEFAULT it densifies the operator via
  MatComputeExplicitOperator() and prints it — a debug aid the in-code
  comment already marks for removal.
*/
PetscErrorCode MatView_Clique(Mat A,PetscViewer viewer)
{
  PetscBool iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscViewerASCIIPrintf(viewer,"Clique run parameters:\n");
    } else if (format == PETSC_VIEWER_DEFAULT) { /* matrix A is factored matrix, remove this block */
      Mat Aaij;
      /* NOTE(review): tabs are disabled and immediately re-enabled, which is a
         no-op; presumably the PETSC_TRUE call was meant to follow the print
         below — confirm before relying on the tab state. */
      PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
      PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
      PetscPrintf(PetscObjectComm((PetscObject)viewer),"Clique matrix\n");
      MatComputeExplicitOperator(A,&Aaij);
      MatView(Aaij,PETSC_VIEWER_STDOUT_WORLD);
      MatDestroy(&Aaij);
    }
  }
  return(0);
}
140: PetscErrorCode MatDestroy_Clique(Mat A)
141: {
143: Mat_Clique *cliq=(Mat_Clique*)A->spptr;
146: printf("MatDestroy_Clique ...\n");
147: if (cliq && cliq->CleanUpClique) {
148: /* Terminate instance, deallocate memories */
149: printf("MatDestroy_Clique ... destroy clique struct \n");
150: PetscCommDestroy(&(cliq->cliq_comm));
151: // free cmat here
152: delete cliq->cmat;
153: delete cliq->frontTree;
154: delete cliq->rhs;
155: delete cliq->xNodal;
156: delete cliq->info;
157: delete cliq->inverseMap;
158: }
159: if (cliq && cliq->Destroy) {
160: cliq->Destroy(A);
161: }
162: PetscFree(A->spptr);
164: /* clear composed functions */
165: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
167: return(0);
168: }
172: PetscErrorCode MatSolve_Clique(Mat A,Vec B,Vec X)
173: {
174: PetscErrorCode ierr;
175: PetscInt i,rank;
176: const PetscCliqScalar *b;
177: Mat_Clique *cliq=(Mat_Clique*)A->spptr;
178: cliq::DistMultiVec<PetscCliqScalar> *bc=cliq->rhs;
179: cliq::DistNodalMultiVec<PetscCliqScalar> *xNodal=cliq->xNodal;
182: VecGetArrayRead(B,(const PetscScalar **)&b);
183: for (i=0; i<A->rmap->n; i++) {
184: bc->SetLocal(i,0,b[i]);
185: }
186: VecRestoreArrayRead(B,(const PetscScalar **)&b);
188: xNodal->Pull( *cliq->inverseMap, *cliq->info, *bc );
189: cliq::Solve( *cliq->info, *cliq->frontTree, *xNodal);
190: xNodal->Push( *cliq->inverseMap, *cliq->info, *bc );
192: MPI_Comm_rank(cliq->cliq_comm,&rank);
193: for (i=0; i<bc->LocalHeight(); i++) {
194: VecSetValue(X,rank*bc->Blocksize()+i,bc->GetLocal(i,0),INSERT_VALUES);
195: }
196: VecAssemblyBegin(X);
197: VecAssemblyEnd(X);
198: return(0);
199: }
/*
  MatCholeskyFactorNumeric_Clique - Numeric phase of the Clique factorization.

  On a repeat factorization (matstruc == SAME_NONZERO_PATTERN) the Clique
  matrix is refreshed from A via MatConvertToClique(MAT_REUSE_MATRIX,...),
  then cliq::LDL() factors the front tree built by the symbolic phase.
  Leaves F marked assembled and matstruc set for subsequent refactorizations.
*/
PetscErrorCode MatCholeskyFactorNumeric_Clique(Mat F,Mat A,const MatFactorInfo *info)
{
  PetscErrorCode ierr;
  Mat_Clique *cliq=(Mat_Clique*)F->spptr;
  /* cmat is only assigned below, never read — kept (and silenced with
     PETSC_UNUSED) presumably as a debugging convenience. */
  PETSC_UNUSED
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat;

  cmat = cliq->cmat;
  if (cliq->matstruc == SAME_NONZERO_PATTERN){ /* successing numerical factorization */
    /* Update cmat */
    MatConvertToClique(A,MAT_REUSE_MATRIX,cliq);
  }

  /* Numeric factorization */
  cliq::LDL( *cliq->info, *cliq->frontTree, cliq::LDL_1D);
  //L.frontType = cliq::SYMM_2D;

  // refactor
  //cliq::ChangeFrontType( *cliq->frontTree, cliq::LDL_2D );
  //*(cliq->frontTree.frontType) = cliq::LDL_2D;
  //cliq::LDL( *cliq->info, *cliq->frontTree, cliq::LDL_2D );

  cliq->matstruc = SAME_NONZERO_PATTERN;
  F->assembled = PETSC_TRUE;
  return(0);
}
/*
  MatCholeskyFactorSymbolic_Clique - Symbolic phase of the Clique factorization.

  Converts A into a fresh Clique matrix, runs nested-dissection reordering
  (controlled by the cutoff/numDistSeps/numSeqSeps options set in
  MatGetFactor_aij_clique), stores the inverse permutation, and builds the
  distributed symmetric front tree used by the numeric phase and the solver.
  Sets CleanUpClique so MatDestroy_Clique() knows to free these objects.
*/
PetscErrorCode MatCholeskyFactorSymbolic_Clique(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  PetscErrorCode ierr;
  Mat_Clique *Acliq=(Mat_Clique*)F->spptr;
  cliq::DistSparseMatrix<PetscCliqScalar> *cmat;
  cliq::DistSeparatorTree sepTree;
  cliq::DistMap map;

  /* Convert A to Aclique */
  MatConvertToClique(A,MAT_INITIAL_MATRIX,Acliq);
  cmat = Acliq->cmat;

  /* NOTE(review): PETSC_TRUE is passed where NestedDissection presumably takes
     a bool flag — verify which Clique parameter this maps to. */
  cliq::NestedDissection( cmat->DistGraph(), map, sepTree, *Acliq->info, PETSC_TRUE, Acliq->numDistSeps, Acliq->numSeqSeps, Acliq->cutoff);
  map.FormInverse( *Acliq->inverseMap );
  Acliq->frontTree = new cliq::DistSymmFrontTree<PetscCliqScalar>( cliq::TRANSPOSE, *cmat, map, sepTree, *Acliq->info );

  Acliq->matstruc = DIFFERENT_NONZERO_PATTERN;
  Acliq->CleanUpClique = PETSC_TRUE;
  return(0);
}
255: /*MC
256: MATSOLVERCLIQUE - A solver package providing direct solvers for distributed
257: and sequential matrices via the external package Clique.
259: Use ./configure --download-clique to have PETSc installed with Clique
261: Use -pc_type cholesky -pc_factor_mat_solver_package clique to use this direct solver
263: Options Database Keys:
264: + -mat_clique_ -
265: - -mat_clique_ <integer> -
267: Level: beginner
269: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
271: M*/
/*
  MatFactorGetSolverPackage_Clique - Reports the solver package name
  (MATSOLVERCLIQUE) for a factored matrix; composed on the factor matrix as
  "MatFactorGetSolverPackage_C".
*/
PetscErrorCode MatFactorGetSolverPackage_Clique(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERCLIQUE;
  return(0);
}
/*
  MatGetFactor_aij_clique - Creates a factor matrix F that will use Clique.

  Builds an empty matrix B of the same local sizes and type as A, installs
  the Clique Cholesky factorization routines (only MAT_FACTOR_CHOLESKY is
  supported; anything else raises PETSC_ERR_SUP), allocates the Mat_Clique
  context with its Clique work objects and a duplicated communicator, and
  overrides view/mult/solve/destroy.

  NOTE(review): cutoff/numDistSeps/numSeqSeps are set to fixed defaults inside
  the PetscOptionsBegin/End bracket; the corresponding PetscOptionsInt()
  queries appear to have been stripped from this listing — confirm against
  the full source.
*/
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_clique(Mat A,MatFactorType ftype,Mat *F)
{
  Mat B;
  Mat_Clique *cliq;

  PetscCliqueInitializePackage();
  /* The factor matrix mirrors A's parallel layout and type */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSetUp(B);

  if (ftype == MAT_FACTOR_CHOLESKY){
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_Clique;
    B->ops->choleskyfactornumeric  = MatCholeskyFactorNumeric_Clique;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported");

  /* Allocate the Clique context and its work objects */
  PetscNewLog(B,&cliq);
  B->spptr = (void*)cliq;
  cliq::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  PetscCommDuplicate(cxxcomm,&(cliq->cliq_comm),NULL);
  cliq->rhs = new cliq::DistMultiVec<PetscCliqScalar>(A->rmap->N,1,cliq->cliq_comm);
  cliq->xNodal = new cliq::DistNodalMultiVec<PetscCliqScalar>();
  cliq->info = new cliq::DistSymmInfo;
  cliq->inverseMap = new cliq::DistMap;
  cliq->CleanUpClique = PETSC_FALSE;
  /* remember the base type's destroy so MatDestroy_Clique can chain to it */
  cliq->Destroy = B->ops->destroy;

  B->ops->view    = MatView_Clique;
  B->ops->mult    = MatMult_Clique; /* for cliq->cmat */
  B->ops->solve   = MatSolve_Clique;

  B->ops->destroy = MatDestroy_Clique;
  B->factortype   = ftype;
  B->assembled    = PETSC_FALSE;

  /* Set Clique options */
  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Clique Options","Mat");
  cliq->cutoff      = 128;  /* maximum size of leaf node */
  cliq->numDistSeps = 1;    /* number of distributed separators to try */
  cliq->numSeqSeps  = 1;    /* number of sequential separators to try */
  PetscOptionsEnd();

  *F = B;
  return(0);
}
334: PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Clique(void)
335: {
339: MatSolverPackageRegister(MATSOLVERCLIQUE,MATMPIAIJ, MAT_FACTOR_LU,MatGetFactor_aij_clique);
340: return(0);
341: }