/* Actual source code: sbaijcholmod.c
   petsc-3.14.6 2021-03-30 */
2: /*
3: Provides an interface to the CHOLMOD sparse solver available through SuiteSparse version 4.2.1
5: When built with PETSC_USE_64BIT_INDICES this will use SuiteSparse_long as the
6: integer type in CHOLMOD, otherwise it will use int. This means
7: all integers in this file are simply declared as PetscInt. Also it means
8: that one cannot use 64BIT_INDICES on 32bit machines [as SuiteSparse_long is 32bit only]
10: */
12: #include <../src/mat/impls/sbaij/seq/sbaij.h>
13: #include <../src/mat/impls/sbaij/seq/cholmod/cholmodimpl.h>
15: /*
16: This is a terrible hack, but it allows the error handler to retain a context.
17: Note that this hack really cannot be made both reentrant and concurrent.
18: */
19: static Mat static_F;
21: static void CholmodErrorHandler(int status,const char *file,int line,const char *message)
22: {
/* Error-handler callback installed into cholmod_common->error_handler (see
   CholmodStart).  CHOLMOD's callback signature carries no user context, so the
   file-scope static_F Mat is used as the logging context -- see the hack note
   above.  CHOLMOD convention: status > CHOLMOD_OK is a warning, status <
   CHOLMOD_OK is an error.
   NOTE(review): CHKERRV(ierr) references an ierr that is not assigned in this
   excerpt; upstream PETSc assigns ierr from the PetscInfo*/PetscErrorPrintf
   calls -- confirm against the full source. */
26: if (status > CHOLMOD_OK) {
27: PetscInfo4(static_F,"CHOLMOD warning %d at %s:%d: %s\n",status,file,line,message);CHKERRV(ierr); /* warning: log via PetscInfo only */
28: } else if (status == CHOLMOD_OK) { /* Documentation says this can happen, but why? */
29: PetscInfo3(static_F,"CHOLMOD OK at %s:%d: %s\n",file,line,message);CHKERRV(ierr);
30: } else {
31: PetscErrorPrintf("CHOLMOD error %d at %s:%d: %s\n",status,file,line,message);CHKERRV(ierr); /* hard error: print to the error stream */
32: }
33: PetscFunctionReturnVoid();
34: }
/* CholmodStart - lazily allocates and initializes the CHOLMOD control
   structure (cholmod_common) attached to factor matrix F, installs the PETSc
   error handler, and mirrors CHOLMOD's tuning parameters in the PETSc options
   database under the -mat_cholmod_ prefix.  Idempotent: returns immediately
   if the common block already exists. */
36: PetscErrorCode CholmodStart(Mat F)
37: {
39: Mat_CHOLMOD *chol=(Mat_CHOLMOD*)F->data;
40: cholmod_common *c;
41: PetscBool flg;
44: if (chol->common) return(0); /* already started */
45: PetscMalloc1(1,&chol->common);
46: !cholmod_X_start(chol->common); /* cholmod_X_* selects the int or SuiteSparse_long CHOLMOD API per PETSc's index width (see cholmodimpl.h) */
48: c = chol->common;
49: c->error_handler = CholmodErrorHandler; /* route CHOLMOD diagnostics through PETSc (context via static_F) */
/* Helper macros: each exposes one cholmod_common field as a
   -mat_cholmod_<name> options key, converting between PETSc option types and
   the field's native C type. */
51: #define CHOLMOD_OPTION_DOUBLE(name,help) do { \
52: PetscReal tmp = (PetscReal)c->name; \
53: PetscOptionsReal("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
54: c->name = (double)tmp; \
55: } while (0)
57: #define CHOLMOD_OPTION_INT(name,help) do { \
58: PetscInt tmp = (PetscInt)c->name; \
59: PetscOptionsInt("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
60: c->name = (int)tmp; \
61: } while (0)
/* size_t fields are read as PetscReal so negative user input can be rejected
   before the cast to unsigned size_t */
63: #define CHOLMOD_OPTION_SIZE_T(name,help) do { \
64: PetscReal tmp = (PetscInt)c->name; \
65: PetscOptionsReal("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
66: if (tmp < 0) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_OUTOFRANGE,"value must be positive"); \
67: c->name = (size_t)tmp; \
68: } while (0)
70: #define CHOLMOD_OPTION_BOOL(name,help) do { \
71: PetscBool tmp = (PetscBool) !!c->name; \
72: PetscOptionsBool("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
73: c->name = (int)tmp; \
74: } while (0)
76: PetscOptionsBegin(PetscObjectComm((PetscObject)F),((PetscObject)F)->prefix,"CHOLMOD Options","Mat");
77: CHOLMOD_OPTION_INT(nmethods,"Number of different ordering methods to try");
79: #if defined(PETSC_USE_SUITESPARSE_GPU)
80: c->useGPU = 1; /* default GPU on when PETSc was configured with SuiteSparse GPU support */
81: CHOLMOD_OPTION_INT(useGPU,"Use GPU for BLAS 1, otherwise 0");
82: CHOLMOD_OPTION_SIZE_T(maxGpuMemBytes,"Maximum memory to allocate on the GPU");
83: CHOLMOD_OPTION_DOUBLE(maxGpuMemFraction,"Fraction of available GPU memory to allocate");
84: #endif
86: /* CHOLMOD handles first-time packing and refactor-packing separately, but we usually want them to be the same. */
87: chol->pack = (PetscBool)c->final_pack;
88: PetscOptionsBool("-mat_cholmod_pack","Pack factors after factorization [disable for frequent repeat factorization]","None",chol->pack,&chol->pack,NULL);
89: c->final_pack = (int)chol->pack;
91: CHOLMOD_OPTION_DOUBLE(dbound,"Minimum absolute value of diagonal entries of D");
92: CHOLMOD_OPTION_DOUBLE(grow0,"Global growth ratio when factors are modified");
93: CHOLMOD_OPTION_DOUBLE(grow1,"Column growth ratio when factors are modified");
94: CHOLMOD_OPTION_SIZE_T(grow2,"Affine column growth constant when factors are modified");
95: CHOLMOD_OPTION_SIZE_T(maxrank,"Max rank of update, larger values are faster but use more memory [2,4,8]");
96: {
/* c->supernodal doubles as the enum value: 0=SIMPLICIAL, 1=AUTO, 2=SUPERNODAL */
97: static const char *const list[] = {"SIMPLICIAL","AUTO","SUPERNODAL","MatCholmodFactorType","MAT_CHOLMOD_FACTOR_",0};
98: PetscOptionsEnum("-mat_cholmod_factor","Factorization method","None",list,(PetscEnum)c->supernodal,(PetscEnum*)&c->supernodal,NULL);
99: }
100: if (c->supernodal) CHOLMOD_OPTION_DOUBLE(supernodal_switch,"flop/nnz_L threshold for switching to supernodal factorization");
101: CHOLMOD_OPTION_BOOL(final_asis,"Leave factors \"as is\"");
102: CHOLMOD_OPTION_BOOL(final_pack,"Pack the columns when finished (use FALSE if the factors will be updated later)");
103: if (!c->final_asis) { /* post-processing choices only matter when factors are not left as-is */
104: CHOLMOD_OPTION_BOOL(final_super,"Leave supernodal factors instead of converting to simplicial");
105: CHOLMOD_OPTION_BOOL(final_ll,"Turn LDL' factorization into LL'");
106: CHOLMOD_OPTION_BOOL(final_monotonic,"Ensure columns are monotonic when done");
107: CHOLMOD_OPTION_BOOL(final_resymbol,"Remove numerically zero values resulting from relaxed supernodal amalgamation");
108: }
109: {
/* zrelax is a fixed array of 3 doubles; require exactly 3 values if the option is given */
110: PetscReal tmp[] = {(PetscReal)c->zrelax[0],(PetscReal)c->zrelax[1],(PetscReal)c->zrelax[2]};
111: PetscInt n = 3;
112: PetscOptionsRealArray("-mat_cholmod_zrelax","3 real supernodal relaxed amalgamation parameters","None",tmp,&n,&flg);
113: if (flg && n != 3) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_OUTOFRANGE,"must provide exactly 3 parameters to -mat_cholmod_zrelax");
114: if (flg) while (n--) c->zrelax[n] = (double)tmp[n];
115: }
116: {
/* nrelax: same shape as zrelax but size_t-valued */
117: PetscInt n,tmp[] = {(PetscInt)c->nrelax[0],(PetscInt)c->nrelax[1],(PetscInt)c->nrelax[2]};
118: PetscOptionsIntArray("-mat_cholmod_nrelax","3 size_t supernodal relaxed amalgamation parameters","None",tmp,&n,&flg);
119: if (flg && n != 3) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_OUTOFRANGE,"must provide exactly 3 parameters to -mat_cholmod_nrelax");
120: if (flg) while (n--) c->nrelax[n] = (size_t)tmp[n];
121: }
122: CHOLMOD_OPTION_BOOL(prefer_upper,"Work with upper triangular form [faster when using fill-reducing ordering, slower in natural ordering]");
123: CHOLMOD_OPTION_BOOL(default_nesdis,"Use NESDIS instead of METIS for nested dissection");
124: CHOLMOD_OPTION_INT(print,"Verbosity level");
125: PetscOptionsEnd();
126: return(0);
127: }
/* MatWrapCholmod_seqsbaij - presents a SeqSBAIJ matrix (block size 1) to
   CHOLMOD as a cholmod_sparse without copying the index arrays.  SBAIJ stores
   the upper triangle by rows; read as compressed columns that is the lower
   triangle, hence stype = -1 and the swapped row/column dimensions below.
   For complex scalars the values are conjugated into a fresh buffer.
   Outputs: *aijalloc - whether index arrays were allocated here (never, for
   this wrapper); *valloc - whether C->x was allocated and must be freed by
   the caller. */
129: static PetscErrorCode MatWrapCholmod_seqsbaij(Mat A,PetscBool values,cholmod_sparse *C,PetscBool *aijalloc,PetscBool *valloc)
130: {
131: Mat_SeqSBAIJ *sbaij = (Mat_SeqSBAIJ*)A->data;
132: PetscBool vallocin = PETSC_FALSE;
136: PetscMemzero(C,sizeof(*C));
137: /* CHOLMOD uses column alignment, SBAIJ stores the upper factor, so we pass it on as a lower factor, swapping the meaning of row and column */
138: C->nrow = (size_t)A->cmap->n;
139: C->ncol = (size_t)A->rmap->n;
140: C->nzmax = (size_t)sbaij->maxnz;
141: C->p = sbaij->i; /* SBAIJ row pointers double as CHOLMOD column pointers */
142: C->i = sbaij->j;
143: if (values) {
144: #if defined(PETSC_USE_COMPLEX)
145: /* we need to pass CHOLMOD the conjugate matrix */
146: PetscScalar *v;
147: PetscInt i;
149: PetscMalloc1(sbaij->maxnz,&v);
150: for (i = 0; i < sbaij->maxnz; i++) v[i] = PetscConj(sbaij->a[i]);
151: C->x = v;
152: vallocin = PETSC_TRUE; /* caller must free C->x */
153: #else
154: C->x = sbaij->a; /* real case: share the value array directly */
155: #endif
156: }
157: C->stype = -1; /* symmetric with lower triangle stored (see comment above) */
158: C->itype = CHOLMOD_INT_TYPE;
159: C->xtype = values ? CHOLMOD_SCALAR_TYPE : CHOLMOD_PATTERN; /* pattern-only wrap used by the symbolic phase */
160: C->dtype = CHOLMOD_DOUBLE;
161: C->sorted = 1;
162: C->packed = 1;
163: *aijalloc = PETSC_FALSE; /* index arrays are always shared, never allocated here */
164: *valloc = vallocin;
165: return(0);
166: }
168: #define GET_ARRAY_READ 0
169: #define GET_ARRAY_WRITE 1
/* VecWrapCholmod - wraps the array of Vec X as an n-by-1 cholmod_dense
   without copying.  rw selects GET_ARRAY_READ or GET_ARRAY_WRITE access;
   VecUnWrapCholmod must later be called with the same rw to restore the
   array. */
171: static PetscErrorCode VecWrapCholmod(Vec X,PetscInt rw,cholmod_dense *Y)
172: {
174: PetscScalar *x;
175: PetscInt n;
178: PetscMemzero(Y,sizeof(*Y));
179: switch (rw) {
180: case GET_ARRAY_READ:
181: VecGetArrayRead(X,(const PetscScalar**)&x);
182: break;
183: case GET_ARRAY_WRITE:
184: VecGetArrayWrite(X,&x);
185: break;
186: default:
187: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Case %D not handled",rw);
188: break;
189: }
190: VecGetSize(X,&n);
192: Y->x = x;
193: Y->nrow = n;
194: Y->ncol = 1;
195: Y->nzmax = n;
196: Y->d = n; /* leading dimension equals the length for a single column */
197: Y->xtype = CHOLMOD_SCALAR_TYPE;
198: Y->dtype = CHOLMOD_DOUBLE;
199: return(0);
200: }
/* VecUnWrapCholmod - returns the array wrapped by VecWrapCholmod to Vec X;
   rw must match the value used when wrapping. */
202: static PetscErrorCode VecUnWrapCholmod(Vec X,PetscInt rw,cholmod_dense *Y)
203: {
204: PetscErrorCode ierr;
207: switch (rw) {
208: case GET_ARRAY_READ:
209: VecRestoreArrayRead(X,(const PetscScalar**)&Y->x);
210: break;
211: case GET_ARRAY_WRITE:
212: VecRestoreArrayWrite(X,(PetscScalar**)&Y->x);
213: break;
214: default:
215: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Case %D not handled",rw);
216: break;
217: }
218: return(0);
219: }
/* MatDenseWrapCholmod - wraps a dense PETSc matrix as a cholmod_dense sharing
   its array (no copy).  The leading dimension (lda) is honored so column
   storage with padding is described correctly.  rw selects read or write
   array access; undo with MatDenseUnWrapCholmod using the same rw. */
221: static PetscErrorCode MatDenseWrapCholmod(Mat X,PetscInt rw,cholmod_dense *Y)
222: {
224: PetscScalar *x;
225: PetscInt m,n,lda;
228: PetscMemzero(Y,sizeof(*Y));
229: switch (rw) {
230: case GET_ARRAY_READ:
231: MatDenseGetArrayRead(X,(const PetscScalar**)&x);
232: break;
233: case GET_ARRAY_WRITE:
234: MatDenseGetArrayWrite(X,&x);
235: break;
236: default:
237: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Case %D not handled",rw);
238: break;
239: }
240: MatDenseGetLDA(X,&lda);
241: MatGetLocalSize(X,&m,&n);
243: Y->x = x;
244: Y->nrow = m;
245: Y->ncol = n;
246: Y->nzmax = lda*n; /* total storage including any padding between columns */
247: Y->d = lda;
248: Y->xtype = CHOLMOD_SCALAR_TYPE;
249: Y->dtype = CHOLMOD_DOUBLE;
250: return(0);
251: }
/* MatDenseUnWrapCholmod - returns the array wrapped by MatDenseWrapCholmod to
   dense matrix X; rw must match the value used when wrapping. */
253: static PetscErrorCode MatDenseUnWrapCholmod(Mat X,PetscInt rw,cholmod_dense *Y)
254: {
255: PetscErrorCode ierr;
258: switch (rw) {
259: case GET_ARRAY_READ:
260: MatDenseRestoreArrayRead(X,(const PetscScalar**)&Y->x);
261: break;
262: case GET_ARRAY_WRITE:
263: /* we don't have MatDenseRestoreArrayWrite */
264: MatDenseRestoreArray(X,(PetscScalar**)&Y->x);
265: break;
266: default:
267: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Case %D not handled",rw);
268: break;
269: }
270: return(0);
271: }
/* MatDestroy_CHOLMOD - destructor for a CHOLMOD factor matrix: releases the
   CHOLMOD factor and workspace, then the PETSc-side wrapper storage. */
273: PETSC_INTERN PetscErrorCode MatDestroy_CHOLMOD(Mat F)
274: {
276: Mat_CHOLMOD *chol=(Mat_CHOLMOD*)F->data;
279: !cholmod_X_free_factor(&chol->factor,chol->common);
280: !cholmod_X_finish(chol->common); /* tears down CHOLMOD's internal workspace; pairs with cholmod_X_start in CholmodStart */
281: PetscFree(chol->common);
282: PetscFree(chol->matrix);
283: PetscObjectComposeFunction((PetscObject)F,"MatFactorGetSolverType_C",NULL); /* remove the composed query function */
284: PetscFree(F->data);
285: return(0);
286: }
288: static PetscErrorCode MatSolve_CHOLMOD(Mat,Vec,Vec);
289: static PetscErrorCode MatMatSolve_CHOLMOD(Mat,Mat,Mat);
291: /*static const char *const CholmodOrderingMethods[] = {"User","AMD","METIS","NESDIS(default)","Natural","NESDIS(small=20000)","NESDIS(small=4,no constrained)","NESDIS()"};*/
/* MatView_Info_CHOLMOD - prints the CHOLMOD control parameters and run
   statistics from cholmod_common.  Only runs after numeric factorization
   (detected by the solve operation having been installed). */
293: static PetscErrorCode MatView_Info_CHOLMOD(Mat F,PetscViewer viewer)
294: {
295: Mat_CHOLMOD *chol = (Mat_CHOLMOD*)F->data;
296: const cholmod_common *c = chol->common;
297: PetscErrorCode ierr;
298: PetscInt i;
/* nothing meaningful to report before the numeric factorization installed MatSolve_CHOLMOD */
301: if (F->ops->solve != MatSolve_CHOLMOD) return(0);
302: PetscViewerASCIIPrintf(viewer,"CHOLMOD run parameters:\n");
303: PetscViewerASCIIPushTab(viewer);
304: PetscViewerASCIIPrintf(viewer,"Pack factors after symbolic factorization: %s\n",chol->pack ? "TRUE" : "FALSE");
305: PetscViewerASCIIPrintf(viewer,"Common.dbound %g (Smallest absolute value of diagonal entries of D)\n",c->dbound);
306: PetscViewerASCIIPrintf(viewer,"Common.grow0 %g\n",c->grow0);
307: PetscViewerASCIIPrintf(viewer,"Common.grow1 %g\n",c->grow1);
308: PetscViewerASCIIPrintf(viewer,"Common.grow2 %u\n",(unsigned)c->grow2);
309: PetscViewerASCIIPrintf(viewer,"Common.maxrank %u\n",(unsigned)c->maxrank);
310: PetscViewerASCIIPrintf(viewer,"Common.supernodal_switch %g\n",c->supernodal_switch);
311: PetscViewerASCIIPrintf(viewer,"Common.supernodal %d\n",c->supernodal);
312: PetscViewerASCIIPrintf(viewer,"Common.final_asis %d\n",c->final_asis);
313: PetscViewerASCIIPrintf(viewer,"Common.final_super %d\n",c->final_super);
314: PetscViewerASCIIPrintf(viewer,"Common.final_ll %d\n",c->final_ll);
315: PetscViewerASCIIPrintf(viewer,"Common.final_pack %d\n",c->final_pack);
316: PetscViewerASCIIPrintf(viewer,"Common.final_monotonic %d\n",c->final_monotonic);
317: PetscViewerASCIIPrintf(viewer,"Common.final_resymbol %d\n",c->final_resymbol);
318: PetscViewerASCIIPrintf(viewer,"Common.zrelax [%g,%g,%g]\n",c->zrelax[0],c->zrelax[1],c->zrelax[2]);
319: PetscViewerASCIIPrintf(viewer,"Common.nrelax [%u,%u,%u]\n",(unsigned)c->nrelax[0],(unsigned)c->nrelax[1],(unsigned)c->nrelax[2]);
320: PetscViewerASCIIPrintf(viewer,"Common.prefer_upper %d\n",c->prefer_upper);
321: PetscViewerASCIIPrintf(viewer,"Common.print %d\n",c->print);
/* per-ordering-method statistics; c->selected marks the ordering actually used */
322: for (i=0; i<c->nmethods; i++) {
323: PetscViewerASCIIPrintf(viewer,"Ordering method %D%s:\n",i,i==c->selected ? " [SELECTED]" : "");
324: PetscViewerASCIIPrintf(viewer," lnz %g, fl %g, prune_dense %g, prune_dense2 %g\n",
325: c->method[i].lnz,c->method[i].fl,c->method[i].prune_dense,c->method[i].prune_dense2);
326: }
327: PetscViewerASCIIPrintf(viewer,"Common.postorder %d\n",c->postorder);
328: PetscViewerASCIIPrintf(viewer,"Common.default_nesdis %d (use NESDIS instead of METIS for nested dissection)\n",c->default_nesdis);
329: /* Statistics */
330: PetscViewerASCIIPrintf(viewer,"Common.fl %g (flop count from most recent analysis)\n",c->fl);
331: PetscViewerASCIIPrintf(viewer,"Common.lnz %g (fundamental nz in L)\n",c->lnz);
332: PetscViewerASCIIPrintf(viewer,"Common.anz %g\n",c->anz);
333: PetscViewerASCIIPrintf(viewer,"Common.modfl %g (flop count from most recent update)\n",c->modfl);
334: PetscViewerASCIIPrintf(viewer,"Common.malloc_count %g (number of live objects)\n",(double)c->malloc_count);
335: PetscViewerASCIIPrintf(viewer,"Common.memory_usage %g (peak memory usage in bytes)\n",(double)c->memory_usage);
336: PetscViewerASCIIPrintf(viewer,"Common.memory_inuse %g (current memory usage in bytes)\n",(double)c->memory_inuse);
337: PetscViewerASCIIPrintf(viewer,"Common.nrealloc_col %g (number of column reallocations)\n",c->nrealloc_col);
338: PetscViewerASCIIPrintf(viewer,"Common.nrealloc_factor %g (number of factor reallocations due to column reallocations)\n",c->nrealloc_factor);
339: PetscViewerASCIIPrintf(viewer,"Common.ndbounds_hit %g (number of times diagonal was modified by dbound)\n",c->ndbounds_hit);
340: PetscViewerASCIIPrintf(viewer,"Common.rowfacfl %g (number of flops in last call to cholmod_rowfac)\n",c->rowfacfl);
341: PetscViewerASCIIPrintf(viewer,"Common.aatfl %g (number of flops to compute A(:,f)*A(:,f)')\n",c->aatfl);
342: #if defined(PETSC_USE_SUITESPARSE_GPU)
343: PetscViewerASCIIPrintf(viewer,"Common.useGPU %d\n",c->useGPU);
344: #endif
345: PetscViewerASCIIPopTab(viewer);
346: return(0);
347: }
/* MatView_CHOLMOD - viewer dispatch: emits the detailed CHOLMOD report only
   for ASCII viewers in PETSC_VIEWER_ASCII_INFO format; all other viewers and
   formats are silently ignored. */
349: PETSC_INTERN PetscErrorCode MatView_CHOLMOD(Mat F,PetscViewer viewer)
350: {
351: PetscErrorCode ierr;
352: PetscBool iascii;
353: PetscViewerFormat format;
356: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
357: if (iascii) {
358: PetscViewerGetFormat(viewer,&format);
359: if (format == PETSC_VIEWER_ASCII_INFO) {
360: MatView_Info_CHOLMOD(F,viewer);
361: }
362: }
363: return(0);
364: }
/* MatSolve_CHOLMOD - solves A x = b with the computed factorization.  Wraps
   b (read) and x (write) as cholmod_dense and calls cholmod_X_solve2 with
   CHOLMOD_A (full system solve).  Y_handle/E_handle are scratch buffers
   allocated by CHOLMOD and freed immediately; X_handle points at our wrapped
   x so no result copy is needed. */
366: static PetscErrorCode MatSolve_CHOLMOD(Mat F,Vec B,Vec X)
367: {
368: Mat_CHOLMOD *chol = (Mat_CHOLMOD*)F->data;
369: cholmod_dense cholB,cholX,*X_handle,*Y_handle = NULL,*E_handle = NULL;
373: static_F = F; /* context for CholmodErrorHandler */
374: VecWrapCholmod(B,GET_ARRAY_READ,&cholB);
375: VecWrapCholmod(X,GET_ARRAY_WRITE,&cholX);
376: X_handle = &cholX;
377: !cholmod_X_solve2(CHOLMOD_A,chol->factor,&cholB,NULL,&X_handle,NULL,&Y_handle,&E_handle,chol->common);
378: !cholmod_X_free_dense(&Y_handle,chol->common);
379: !cholmod_X_free_dense(&E_handle,chol->common);
380: VecUnWrapCholmod(B,GET_ARRAY_READ,&cholB);
381: VecUnWrapCholmod(X,GET_ARRAY_WRITE,&cholX);
382: return(0);
383: }
/* MatMatSolve_CHOLMOD - multiple-right-hand-side variant of
   MatSolve_CHOLMOD: B and X are dense matrices wrapped column-wise; otherwise
   identical flow (cholmod_X_solve2 with CHOLMOD_A, free scratch, unwrap). */
385: static PetscErrorCode MatMatSolve_CHOLMOD(Mat F,Mat B,Mat X)
386: {
387: Mat_CHOLMOD *chol = (Mat_CHOLMOD*)F->data;
388: cholmod_dense cholB,cholX,*X_handle,*Y_handle = NULL,*E_handle = NULL;
392: static_F = F; /* context for CholmodErrorHandler */
393: MatDenseWrapCholmod(B,GET_ARRAY_READ,&cholB);
394: MatDenseWrapCholmod(X,GET_ARRAY_WRITE,&cholX);
395: X_handle = &cholX;
396: !cholmod_X_solve2(CHOLMOD_A,chol->factor,&cholB,NULL,&X_handle,NULL,&Y_handle,&E_handle,chol->common);
397: !cholmod_X_free_dense(&Y_handle,chol->common);
398: !cholmod_X_free_dense(&E_handle,chol->common);
399: MatDenseUnWrapCholmod(B,GET_ARRAY_READ,&cholB);
400: MatDenseUnWrapCholmod(X,GET_ARRAY_WRITE,&cholX);
401: return(0);
402: }
/* MatCholeskyFactorNumeric_CHOLMOD - numeric Cholesky factorization: wraps A
   (with values) via the matrix-type-specific Wrap callback, factorizes into
   the symbolic factor produced earlier, checks for failure / indefiniteness,
   and installs the solve operations on F. */
404: static PetscErrorCode MatCholeskyFactorNumeric_CHOLMOD(Mat F,Mat A,const MatFactorInfo *info)
405: {
406: Mat_CHOLMOD *chol = (Mat_CHOLMOD*)F->data;
407: cholmod_sparse cholA;
408: PetscBool aijalloc,valloc;
412: (*chol->Wrap)(A,PETSC_TRUE,&cholA,&aijalloc,&valloc);
413: static_F = F; /* context for CholmodErrorHandler */
414: !cholmod_X_factorize(&cholA,chol->factor,chol->common);
/* NOTE(review): this excerpt tests ierr without a visible assignment; in
   upstream PETSc ierr holds the (negated) cholmod_X_factorize result from the
   line above -- confirm against the full source */
415: if (ierr) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD factorization failed with status %d",chol->common->status);
416: if (chol->common->status == CHOLMOD_NOT_POSDEF) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_MAT_CH_ZRPVT,"CHOLMOD detected that the matrix is not positive definite, failure at column %u",(unsigned)chol->factor->minor);
/* free any buffers the Wrap callback allocated (see MatWrapCholmod_seqsbaij) */
418: if (aijalloc) {PetscFree2(cholA.p,cholA.i);}
419: if (valloc) {PetscFree(cholA.x);}
420: #if defined(PETSC_USE_SUITESPARSE_GPU)
421: PetscLogGpuTimeAdd(chol->common->CHOLMOD_GPU_GEMM_TIME + chol->common->CHOLMOD_GPU_SYRK_TIME + chol->common->CHOLMOD_GPU_TRSM_TIME + chol->common->CHOLMOD_GPU_POTRF_TIME);
422: #endif
/* A'A and A A' coincide for a symmetric factorization, so transpose solves reuse the same routines */
424: F->ops->solve = MatSolve_CHOLMOD;
425: F->ops->solvetranspose = MatSolve_CHOLMOD;
426: F->ops->matsolve = MatMatSolve_CHOLMOD;
427: F->ops->matsolvetranspose = MatMatSolve_CHOLMOD;
428: return(0);
429: }
/* MatCholeskyFactorSymbolic_CHOLMOD - symbolic analysis.  Wraps A pattern-only
   (values=PETSC_FALSE).  If a factor already exists its symbolic pattern is
   recomputed in place (resymbol); otherwise a fresh analysis is performed,
   using the PETSc-supplied ordering perm when given, else CHOLMOD's internal
   ordering.  Installs the numeric-factorization operation on success. */
431: PETSC_INTERN PetscErrorCode MatCholeskyFactorSymbolic_CHOLMOD(Mat F,Mat A,IS perm,const MatFactorInfo *info)
432: {
433: Mat_CHOLMOD *chol = (Mat_CHOLMOD*)F->data;
435: cholmod_sparse cholA;
436: PetscBool aijalloc,valloc;
437: PetscInt *fset = 0; /* no column subset: analyze the whole matrix */
438: size_t fsize = 0;
441: (*chol->Wrap)(A,PETSC_FALSE,&cholA,&aijalloc,&valloc);
442: static_F = F; /* context for CholmodErrorHandler */
443: if (chol->factor) { /* re-analysis of an existing factor (repeat symbolic factorization) */
444: !cholmod_X_resymbol(&cholA,fset,fsize,(int)chol->pack,chol->factor,chol->common);
/* NOTE(review): ierr appears unset in this excerpt; upstream it holds the (negated) cholmod_X_resymbol result -- confirm */
445: if (ierr) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD analysis failed with status %d",chol->common->status);
446: } else if (perm) { /* analysis with the caller-provided ordering */
447: const PetscInt *ip;
448: ISGetIndices(perm,&ip);
449: chol->factor = cholmod_X_analyze_p(&cholA,(PetscInt*)ip,fset,fsize,chol->common);
450: if (!chol->factor) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD analysis failed using PETSc ordering with status %d",chol->common->status);
451: ISRestoreIndices(perm,&ip);
452: } else { /* let CHOLMOD choose the ordering */
453: chol->factor = cholmod_X_analyze(&cholA,chol->common);
454: if (!chol->factor) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD analysis failed using internal ordering with status %d",chol->common->status);
455: }
/* free any buffers the Wrap callback allocated */
457: if (aijalloc) {PetscFree2(cholA.p,cholA.i);}
458: if (valloc) {PetscFree(cholA.x);}
460: F->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_CHOLMOD;
461: return(0);
462: }
/* Returns the solver package name (MATSOLVERCHOLMOD) for
   MatFactorGetSolverType(); composed onto the factor matrix at creation. */
464: static PetscErrorCode MatFactorGetSolverType_seqsbaij_cholmod(Mat A,MatSolverType *type)
465: {
467: *type = MATSOLVERCHOLMOD;
468: return(0);
469: }
/* MatGetInfo_CHOLMOD - fills MatInfo from the CHOLMOD statistics kept in
   cholmod_common: lnz (nonzeros in L) for allocation/usage, memory_inuse for
   memory, malloc_count for factor mallocs.  Fields CHOLMOD does not track are
   reported as zero. */
471: PETSC_INTERN PetscErrorCode MatGetInfo_CHOLMOD(Mat F,MatInfoType flag,MatInfo *info)
472: {
473: Mat_CHOLMOD *chol = (Mat_CHOLMOD*)F->data;
476: info->block_size = 1.0; /* CHOLMOD interface only supports bs=1 (checked at factor creation) */
477: info->nz_allocated = chol->common->lnz;
478: info->nz_used = chol->common->lnz;
479: info->nz_unneeded = 0.0;
480: info->assemblies = 0.0;
481: info->mallocs = 0.0;
482: info->memory = chol->common->memory_inuse;
483: info->fill_ratio_given = 0;
484: info->fill_ratio_needed = 0;
485: info->factor_mallocs = chol->common->malloc_count;
486: return(0);
487: }
489: /*MC
490: MATSOLVERCHOLMOD
492: A matrix type providing direct solvers (Cholesky) for sequential matrices
493: via the external package CHOLMOD.
495: Use ./configure --download-suitesparse to install PETSc to use CHOLMOD
497: Use -pc_type cholesky -pc_factor_mat_solver_type cholmod to use this direct solver
499: Consult CHOLMOD documentation for more information about the Common parameters
500: which correspond to the options database keys below.
502: Options Database Keys:
503: + -mat_cholmod_dbound <0> - Minimum absolute value of diagonal entries of D (None)
504: . -mat_cholmod_grow0 <1.2> - Global growth ratio when factors are modified (None)
505: . -mat_cholmod_grow1 <1.2> - Column growth ratio when factors are modified (None)
506: . -mat_cholmod_grow2 <5> - Affine column growth constant when factors are modified (None)
507: . -mat_cholmod_maxrank <8> - Max rank of update, larger values are faster but use more memory [2,4,8] (None)
508: . -mat_cholmod_factor <AUTO> - (choose one of) SIMPLICIAL AUTO SUPERNODAL
509: . -mat_cholmod_supernodal_switch <40> - flop/nnz_L threshold for switching to supernodal factorization (None)
510: . -mat_cholmod_final_asis <TRUE> - Leave factors "as is" (None)
511: . -mat_cholmod_final_pack <TRUE> - Pack the columns when finished (use FALSE if the factors will be updated later) (None)
512: . -mat_cholmod_zrelax <0.8> - 3 real supernodal relaxed amalgamation parameters (None)
513: . -mat_cholmod_nrelax <4> - 3 size_t supernodal relaxed amalgamation parameters (None)
514: . -mat_cholmod_prefer_upper <TRUE> - Work with upper triangular form (faster when using fill-reducing ordering, slower in natural ordering) (None)
515: . -mat_cholmod_print <3> - Verbosity level (None)
516: - -mat_ordering_type internal - Use the ordering provided by Cholmod
518: Level: beginner
520: Note: CHOLMOD is part of SuiteSparse http://faculty.cse.tamu.edu/davis/suitesparse.html
522: .seealso: PCCHOLESKY, PCFactorSetMatSolverType(), MatSolverType
523: M*/
/* MatGetFactor_seqsbaij_cholmod - creates a MAT_FACTOR_CHOLESKY factor matrix
   of type "cholmod" for a SeqSBAIJ matrix A.  Requires block size 1 and, for
   complex scalars, a Hermitian matrix.  Installs the symbolic factorization
   and support operations; the numeric factorization and solves are installed
   later by the symbolic/numeric phases. */
525: PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_cholmod(Mat A,MatFactorType ftype,Mat *F)
526: {
527: Mat B;
528: Mat_CHOLMOD *chol;
530: PetscInt m=A->rmap->n,n=A->cmap->n,bs;
531: const char *prefix;
534: MatGetBlockSize(A,&bs);
535: if (bs != 1) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"CHOLMOD only supports block size=1, given %D",bs);
536: #if defined(PETSC_USE_COMPLEX)
537: if (!A->hermitian) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for Hermitian matrices");
538: #endif
539: /* Create the factorization matrix F */
540: MatCreate(PetscObjectComm((PetscObject)A),&B);
541: MatSetSizes(B,PETSC_DECIDE,PETSC_DECIDE,m,n);
542: PetscStrallocpy("cholmod",&((PetscObject)B)->type_name);
543: MatGetOptionsPrefix(A,&prefix); /* inherit A's options prefix so -mat_cholmod_* keys resolve consistently */
544: MatSetOptionsPrefix(B,prefix);
545: MatSetUp(B);
546: PetscNewLog(B,&chol);
/* SBAIJ-specific wrapper; other matrix types supply their own Wrap callback */
548: chol->Wrap = MatWrapCholmod_seqsbaij;
549: B->data = chol;
551: B->ops->getinfo = MatGetInfo_CHOLMOD;
552: B->ops->view = MatView_CHOLMOD;
553: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_CHOLMOD;
554: B->ops->destroy = MatDestroy_CHOLMOD;
555: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_seqsbaij_cholmod);
556: B->factortype = MAT_FACTOR_CHOLESKY;
557: B->assembled = PETSC_TRUE;
558: B->preallocated = PETSC_TRUE;
/* allocate the cholmod_common block and process -mat_cholmod_* options now */
560: CholmodStart(B);
562: PetscFree(B->solvertype);
563: PetscStrallocpy(MATSOLVERCHOLMOD,&B->solvertype);
564: B->useordering = PETSC_TRUE; /* a PETSc-supplied ordering (perm) will be honored by the symbolic phase */
565: *F = B;
566: return(0);
567: }