Actual source code: sbaijcholmod.c

petsc-3.8.4 2018-03-24

  2: /*
  3:    Provides an interface to the CHOLMOD sparse solver available through SuiteSparse version 4.2.1

  5:    When built with PETSC_USE_64BIT_INDICES this will use SuiteSparse_long as the
  6:    integer type in CHOLMOD, otherwise it will use int. This means that
  7:    all integers in this file are simply declared as PetscInt. It also means
  8:    that one cannot use 64BIT_INDICES on 32-bit machines [as SuiteSparse_long is only 32-bit on such machines]

 10: */

 12:  #include <../src/mat/impls/sbaij/seq/sbaij.h>
 13:  #include <../src/mat/impls/sbaij/seq/cholmod/cholmodimpl.h>
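
/*
   A minimal sketch of how the cholmod_X_*() names used in this file resolve
   (the actual macros live in cholmodimpl.h; the exact names shown here are
   assumed for illustration):

     #if defined(PETSC_USE_64BIT_INDICES)
       #define CHOLMOD_INT_TYPE  CHOLMOD_LONG
       #define cholmod_X_start   cholmod_l_start
       #define cholmod_X_solve   cholmod_l_solve
     #else
       #define CHOLMOD_INT_TYPE  CHOLMOD_INT
       #define cholmod_X_start   cholmod_start
       #define cholmod_X_solve   cholmod_solve
     #endif

   so one source file can drive either the int or the SuiteSparse_long build
   of CHOLMOD.
*/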

 15: /*
 16:    This is a terrible hack, but it allows the error handler to retain a context.
 17:    Note that this hack really cannot be made both reentrant and concurrent.
 18: */
 19: static Mat static_F;

 21: static void CholmodErrorHandler(int status,const char *file,int line,const char *message)
 22: {
 23:   PetscErrorCode ierr;

 25:   PetscFunctionBegin;
 26:   if (status > CHOLMOD_OK) {
 27:     ierr = PetscInfo4(static_F,"CHOLMOD warning %d at %s:%d: %s\n",status,file,line,message);CHKERRV(ierr);
 28:   } else if (status == CHOLMOD_OK) { /* Documentation says this can happen, but why? */
 29:     ierr = PetscInfo3(static_F,"CHOLMOD OK at %s:%d: %s\n",file,line,message);CHKERRV(ierr);
 30:   } else {
 31:     ierr = PetscErrorPrintf("CHOLMOD error %d at %s:%d: %s\n",status,file,line,message);CHKERRV(ierr);
 32:   }
 33:   PetscFunctionReturnVoid();
 34: }

 36: PetscErrorCode  CholmodStart(Mat F)
 37: {
 38:   PetscErrorCode ierr;
 39:   Mat_CHOLMOD    *chol=(Mat_CHOLMOD*)F->data;
 40:   cholmod_common *c;
 41:   PetscBool      flg;

 44:   if (chol->common) return(0);
 45:   ierr = PetscMalloc1(1,&chol->common);CHKERRQ(ierr);
 46:   ierr = !cholmod_X_start(chol->common);CHKERRQ(ierr);

 48:   c                = chol->common;
 49:   c->error_handler = CholmodErrorHandler;

 51: #define CHOLMOD_OPTION_DOUBLE(name,help) do {                            \
 52:     PetscReal tmp = (PetscReal)c->name;                                  \
 53:     PetscOptionsReal("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
 54:     c->name = (double)tmp;                                               \
 55: } while (0)

 57: #define CHOLMOD_OPTION_INT(name,help) do {                               \
 58:     PetscInt tmp = (PetscInt)c->name;                                    \
 59:     PetscOptionsInt("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
 60:     c->name = (int)tmp;                                                  \
 61: } while (0)

 63: #define CHOLMOD_OPTION_SIZE_T(name,help) do {                            \
 64:     PetscInt tmp = (PetscInt)c->name;                                    \
 65:     PetscOptionsInt("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
 66:     if (tmp < 0) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_OUTOFRANGE,"value must be positive"); \
 67:     c->name = (size_t)tmp;                                               \
 68: } while (0)

 70: #define CHOLMOD_OPTION_BOOL(name,help) do {                             \
 71:     PetscBool tmp = (PetscBool) !!c->name;                              \
 72:     PetscOptionsBool("-mat_cholmod_" #name,help,"None",tmp,&tmp,NULL); \
 73:     c->name = (int)tmp;                                                  \
 74: } while (0)
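
/*
   A minimal usage sketch for the option macros above: the invocation

     CHOLMOD_OPTION_DOUBLE(dbound,"Minimum absolute value of diagonal entries of D");

   reads the current value of c->dbound, exposes it as the runtime option
   -mat_cholmod_dbound, and writes any user-supplied value back into c->dbound,
   so running the application (name hypothetical) as

     ./app -mat_cholmod_dbound 1e-12

   sets Common.dbound to 1e-12 before factorization.
*/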

 76:   PetscOptionsBegin(PetscObjectComm((PetscObject)F),((PetscObject)F)->prefix,"CHOLMOD Options","Mat");
 77:   /* CHOLMOD handles first-time packing and refactor-packing separately, but we usually want them to be the same. */
 78:   chol->pack = (PetscBool)c->final_pack;

 80: #if defined(PETSC_USE_SUITESPARSE_GPU)
 81:   c->useGPU = 1;
 82:   CHOLMOD_OPTION_INT(useGPU,"Use GPU for BLAS 1, otherwise 0");
 83: #endif

 85:   PetscOptionsBool("-mat_cholmod_pack","Pack factors after factorization [disable for frequent repeat factorization]","None",chol->pack,&chol->pack,NULL);
 86:   c->final_pack = (int)chol->pack;

 88:   CHOLMOD_OPTION_DOUBLE(dbound,"Minimum absolute value of diagonal entries of D");
 89:   CHOLMOD_OPTION_DOUBLE(grow0,"Global growth ratio when factors are modified");
 90:   CHOLMOD_OPTION_DOUBLE(grow1,"Column growth ratio when factors are modified");
 91:   CHOLMOD_OPTION_SIZE_T(grow2,"Affine column growth constant when factors are modified");
 92:   CHOLMOD_OPTION_SIZE_T(maxrank,"Max rank of update, larger values are faster but use more memory [2,4,8]");
 93:   {
 94:     static const char *const list[] = {"SIMPLICIAL","AUTO","SUPERNODAL","MatCholmodFactorType","MAT_CHOLMOD_FACTOR_",0};
 95:     PetscOptionsEnum("-mat_cholmod_factor","Factorization method","None",list,(PetscEnum)c->supernodal,(PetscEnum*)&c->supernodal,NULL);
 96:   }
 97:   if (c->supernodal) CHOLMOD_OPTION_DOUBLE(supernodal_switch,"flop/nnz_L threshold for switching to supernodal factorization");
 98:   CHOLMOD_OPTION_BOOL(final_asis,"Leave factors \"as is\"");
 99:   CHOLMOD_OPTION_BOOL(final_pack,"Pack the columns when finished (use FALSE if the factors will be updated later)");
100:   if (!c->final_asis) {
101:     CHOLMOD_OPTION_BOOL(final_super,"Leave supernodal factors instead of converting to simplicial");
102:     CHOLMOD_OPTION_BOOL(final_ll,"Turn LDL' factorization into LL'");
103:     CHOLMOD_OPTION_BOOL(final_monotonic,"Ensure columns are monotonic when done");
104:     CHOLMOD_OPTION_BOOL(final_resymbol,"Remove numerically zero values resulting from relaxed supernodal amalgamation");
105:   }
106:   {
107:     PetscReal tmp[] = {(PetscReal)c->zrelax[0],(PetscReal)c->zrelax[1],(PetscReal)c->zrelax[2]};
108:     PetscInt  n     = 3;
109:     PetscOptionsRealArray("-mat_cholmod_zrelax","3 real supernodal relaxed amalgamation parameters","None",tmp,&n,&flg);
110:     if (flg && n != 3) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_OUTOFRANGE,"must provide exactly 3 parameters to -mat_cholmod_zrelax");
111:     if (flg) while (n--) c->zrelax[n] = (double)tmp[n];
112:   }
113:   {
114:     PetscInt n = 3,tmp[] = {(PetscInt)c->nrelax[0],(PetscInt)c->nrelax[1],(PetscInt)c->nrelax[2]};
115:     PetscOptionsIntArray("-mat_cholmod_nrelax","3 size_t supernodal relaxed amalgamation parameters","None",tmp,&n,&flg);
116:     if (flg && n != 3) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_OUTOFRANGE,"must provide exactly 3 parameters to -mat_cholmod_nrelax");
117:     if (flg) while (n--) c->nrelax[n] = (size_t)tmp[n];
118:   }
119:   CHOLMOD_OPTION_BOOL(prefer_upper,"Work with upper triangular form [faster when using fill-reducing ordering, slower in natural ordering]");
120:   CHOLMOD_OPTION_BOOL(default_nesdis,"Use NESDIS instead of METIS for nested dissection");
121:   CHOLMOD_OPTION_INT(print,"Verbosity level");
122:   PetscOptionsEnd();
123:   return(0);
124: }

126: static PetscErrorCode MatWrapCholmod_seqsbaij(Mat A,PetscBool values,cholmod_sparse *C,PetscBool  *aijalloc)
127: {
128:   Mat_SeqSBAIJ   *sbaij = (Mat_SeqSBAIJ*)A->data;

132:   PetscMemzero(C,sizeof(*C));
133:   /* CHOLMOD uses compressed-column storage and SBAIJ stores the upper triangle in compressed rows, so we pass it to CHOLMOD as a lower factor, swapping the meaning of row and column */
134:   C->nrow   = (size_t)A->cmap->n;
135:   C->ncol   = (size_t)A->rmap->n;
136:   C->nzmax  = (size_t)sbaij->maxnz;
137:   C->p      = sbaij->i;
138:   C->i      = sbaij->j;
139:   C->x      = sbaij->a;
140:   C->stype  = -1;
141:   C->itype  = CHOLMOD_INT_TYPE;
142:   C->xtype  = CHOLMOD_SCALAR_TYPE;
143:   C->dtype  = CHOLMOD_DOUBLE;
144:   C->sorted = 1;
145:   C->packed = 1;
146:   *aijalloc = PETSC_FALSE;
147:   return(0);
148: }
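
/*
   A small worked example of the wrapping above.  For the symmetric matrix

        [4 1 0]
        [1 5 2]
        [0 2 6]

   SeqSBAIJ (bs=1) stores the upper triangle by rows:

        i = {0,2,4,5},  j = {0,1, 1,2, 2},  a = {4,1, 5,2, 6}

   Handing these arrays to CHOLMOD unchanged as column pointers (C->p), row
   indices (C->i) and values (C->x) with stype = -1 describes the lower
   triangle by columns, i.e. the same data with the roles of row and column
   swapped, which is why nrow/ncol above are taken from cmap/rmap.
*/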

150: static PetscErrorCode VecWrapCholmodRead(Vec X,cholmod_dense *Y)
151: {
152:   PetscErrorCode    ierr;
153:   const PetscScalar *x;
154:   PetscInt          n;

157:   PetscMemzero(Y,sizeof(*Y));
158:   VecGetArrayRead(X,&x);
159:   VecGetSize(X,&n);

161:   Y->x     = (double*)x;
162:   Y->nrow  = n;
163:   Y->ncol  = 1;
164:   Y->nzmax = n;
165:   Y->d     = n;
167:   Y->xtype = CHOLMOD_SCALAR_TYPE;
168:   Y->dtype = CHOLMOD_DOUBLE;
169:   return(0);
170: }

172: static PetscErrorCode VecUnWrapCholmodRead(Vec X,cholmod_dense *Y)
173: {
174:   PetscErrorCode    ierr;

177:   VecRestoreArrayRead(X,NULL);
178:   return(0);
179: }

181: PETSC_INTERN PetscErrorCode  MatDestroy_CHOLMOD(Mat F)
182: {
183:   PetscErrorCode ierr;
184:   Mat_CHOLMOD    *chol=(Mat_CHOLMOD*)F->data;

187:   ierr = !cholmod_X_free_factor(&chol->factor,chol->common);CHKERRQ(ierr);
188:   ierr = !cholmod_X_finish(chol->common);CHKERRQ(ierr);
189:   ierr = PetscFree(chol->common);CHKERRQ(ierr);
190:   ierr = PetscFree(chol->matrix);CHKERRQ(ierr);
191:   ierr = PetscFree(F->data);CHKERRQ(ierr);
192:   return(0);
193: }

195: static PetscErrorCode MatSolve_CHOLMOD(Mat,Vec,Vec);

197: /*static const char *const CholmodOrderingMethods[] = {"User","AMD","METIS","NESDIS(default)","Natural","NESDIS(small=20000)","NESDIS(small=4,no constrained)","NESDIS()"};*/

199: static PetscErrorCode MatFactorInfo_CHOLMOD(Mat F,PetscViewer viewer)
200: {
201:   Mat_CHOLMOD          *chol = (Mat_CHOLMOD*)F->data;
202:   const cholmod_common *c    = chol->common;
203:   PetscErrorCode       ierr;
204:   PetscInt             i;

207:   if (F->ops->solve != MatSolve_CHOLMOD) return(0);
208:   PetscViewerASCIIPrintf(viewer,"CHOLMOD run parameters:\n");
209:   PetscViewerASCIIPushTab(viewer);
210:   PetscViewerASCIIPrintf(viewer,"Pack factors after symbolic factorization: %s\n",chol->pack ? "TRUE" : "FALSE");
211:   PetscViewerASCIIPrintf(viewer,"Common.dbound            %g  (Smallest absolute value of diagonal entries of D)\n",c->dbound);
212:   PetscViewerASCIIPrintf(viewer,"Common.grow0             %g\n",c->grow0);
213:   PetscViewerASCIIPrintf(viewer,"Common.grow1             %g\n",c->grow1);
214:   PetscViewerASCIIPrintf(viewer,"Common.grow2             %u\n",(unsigned)c->grow2);
215:   PetscViewerASCIIPrintf(viewer,"Common.maxrank           %u\n",(unsigned)c->maxrank);
216:   PetscViewerASCIIPrintf(viewer,"Common.supernodal_switch %g\n",c->supernodal_switch);
217:   PetscViewerASCIIPrintf(viewer,"Common.supernodal        %d\n",c->supernodal);
218:   PetscViewerASCIIPrintf(viewer,"Common.final_asis        %d\n",c->final_asis);
219:   PetscViewerASCIIPrintf(viewer,"Common.final_super       %d\n",c->final_super);
220:   PetscViewerASCIIPrintf(viewer,"Common.final_ll          %d\n",c->final_ll);
221:   PetscViewerASCIIPrintf(viewer,"Common.final_pack        %d\n",c->final_pack);
222:   PetscViewerASCIIPrintf(viewer,"Common.final_monotonic   %d\n",c->final_monotonic);
223:   PetscViewerASCIIPrintf(viewer,"Common.final_resymbol    %d\n",c->final_resymbol);
224:   PetscViewerASCIIPrintf(viewer,"Common.zrelax            [%g,%g,%g]\n",c->zrelax[0],c->zrelax[1],c->zrelax[2]);
225:   PetscViewerASCIIPrintf(viewer,"Common.nrelax            [%u,%u,%u]\n",(unsigned)c->nrelax[0],(unsigned)c->nrelax[1],(unsigned)c->nrelax[2]);
226:   PetscViewerASCIIPrintf(viewer,"Common.prefer_upper      %d\n",c->prefer_upper);
227:   PetscViewerASCIIPrintf(viewer,"Common.print             %d\n",c->print);
228:   for (i=0; i<c->nmethods; i++) {
229:     PetscViewerASCIIPrintf(viewer,"Ordering method %D%s:\n",i,i==c->selected ? " [SELECTED]" : "");
230:     PetscViewerASCIIPrintf(viewer,"  lnz %g, fl %g, prune_dense %g, prune_dense2 %g\n",
231:                                   c->method[i].lnz,c->method[i].fl,c->method[i].prune_dense,c->method[i].prune_dense2);
232:   }
233:   PetscViewerASCIIPrintf(viewer,"Common.postorder         %d\n",c->postorder);
234:   PetscViewerASCIIPrintf(viewer,"Common.default_nesdis    %d (use NESDIS instead of METIS for nested dissection)\n",c->default_nesdis);
235:   /* Statistics */
236:   PetscViewerASCIIPrintf(viewer,"Common.fl                %g (flop count from most recent analysis)\n",c->fl);
237:   PetscViewerASCIIPrintf(viewer,"Common.lnz               %g (fundamental nz in L)\n",c->lnz);
238:   PetscViewerASCIIPrintf(viewer,"Common.anz               %g\n",c->anz);
239:   PetscViewerASCIIPrintf(viewer,"Common.modfl             %g (flop count from most recent update)\n",c->modfl);
240:   PetscViewerASCIIPrintf(viewer,"Common.malloc_count      %g (number of live objects)\n",(double)c->malloc_count);
241:   PetscViewerASCIIPrintf(viewer,"Common.memory_usage      %g (peak memory usage in bytes)\n",(double)c->memory_usage);
242:   PetscViewerASCIIPrintf(viewer,"Common.memory_inuse      %g (current memory usage in bytes)\n",(double)c->memory_inuse);
243:   PetscViewerASCIIPrintf(viewer,"Common.nrealloc_col      %g (number of column reallocations)\n",c->nrealloc_col);
244:   PetscViewerASCIIPrintf(viewer,"Common.nrealloc_factor   %g (number of factor reallocations due to column reallocations)\n",c->nrealloc_factor);
245:   PetscViewerASCIIPrintf(viewer,"Common.ndbounds_hit      %g (number of times diagonal was modified by dbound)\n",c->ndbounds_hit);
246:   PetscViewerASCIIPrintf(viewer,"Common.rowfacfl          %g (number of flops in last call to cholmod_rowfac)\n",c->rowfacfl);
247:   PetscViewerASCIIPrintf(viewer,"Common.aatfl             %g (number of flops to compute A(:,f)*A(:,f)')\n",c->aatfl);
248: #if defined(PETSC_USE_SUITESPARSE_GPU)
249:   PetscViewerASCIIPrintf(viewer,"Common.useGPU            %d\n",c->useGPU);
250: #endif
251:   PetscViewerASCIIPopTab(viewer);
252:   return(0);
253: }

255: PETSC_INTERN PetscErrorCode  MatView_CHOLMOD(Mat F,PetscViewer viewer)
256: {
257:   PetscErrorCode    ierr;
258:   PetscBool         iascii;
259:   PetscViewerFormat format;

262:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
263:   if (iascii) {
264:     PetscViewerGetFormat(viewer,&format);
265:     if (format == PETSC_VIEWER_ASCII_INFO) {
266:       MatFactorInfo_CHOLMOD(F,viewer);
267:     }
268:   }
269:   return(0);
270: }

272: static PetscErrorCode MatSolve_CHOLMOD(Mat F,Vec B,Vec X)
273: {
274:   Mat_CHOLMOD    *chol = (Mat_CHOLMOD*)F->data;
275:   cholmod_dense  cholB,*cholX;
276:   PetscScalar    *x;
277:   PetscErrorCode ierr;

280:   VecWrapCholmodRead(B,&cholB);
281:   static_F = F;
282:   cholX    = cholmod_X_solve(CHOLMOD_A,chol->factor,&cholB,chol->common);
283:   if (!cholX) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"CHOLMOD failed");
284:   VecUnWrapCholmodRead(B,&cholB);
285:   VecGetArray(X,&x);
286:   PetscMemcpy(x,cholX->x,cholX->nrow*sizeof(*x));
287:   ierr = !cholmod_X_free_dense(&cholX,chol->common);CHKERRQ(ierr);
288:   VecRestoreArray(X,&x);
289:   return(0);
290: }

292: static PetscErrorCode MatCholeskyFactorNumeric_CHOLMOD(Mat F,Mat A,const MatFactorInfo *info)
293: {
294:   Mat_CHOLMOD    *chol = (Mat_CHOLMOD*)F->data;
295:   cholmod_sparse cholA;
296:   PetscBool      aijalloc;
297:   PetscErrorCode ierr;

300:   (*chol->Wrap)(A,PETSC_TRUE,&cholA,&aijalloc);
301:   static_F = F;
302:   ierr = !cholmod_X_factorize(&cholA,chol->factor,chol->common);
303:   if (ierr) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD factorization failed with status %d",chol->common->status);
304:   if (chol->common->status == CHOLMOD_NOT_POSDEF) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_MAT_CH_ZRPVT,"CHOLMOD detected that the matrix is not positive definite, failure at column %u",(unsigned)chol->factor->minor);

306:   if (aijalloc) {PetscFree3(cholA.p,cholA.i,cholA.x);}

308:   F->ops->solve          = MatSolve_CHOLMOD;
309:   F->ops->solvetranspose = MatSolve_CHOLMOD;
310:   return(0);
311: }

313: PETSC_INTERN PetscErrorCode  MatCholeskyFactorSymbolic_CHOLMOD(Mat F,Mat A,IS perm,const MatFactorInfo *info)
314: {
315:   Mat_CHOLMOD    *chol = (Mat_CHOLMOD*)F->data;
316:   PetscErrorCode ierr;
317:   cholmod_sparse cholA;
318:   PetscBool      aijalloc;
319:   PetscInt       *fset = 0;
320:   size_t         fsize = 0;

323:   (*chol->Wrap)(A,PETSC_FALSE,&cholA,&aijalloc);
324:   static_F = F;
325:   if (chol->factor) {
326:     ierr = !cholmod_X_resymbol(&cholA,fset,fsize,(int)chol->pack,chol->factor,chol->common);
327:     if (ierr) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD analysis failed with status %d",chol->common->status);
328:   } else if (perm) {
329:     const PetscInt *ip;
330:     ISGetIndices(perm,&ip);
331:     chol->factor = cholmod_X_analyze_p(&cholA,(PetscInt*)ip,fset,fsize,chol->common);
332:     if (!chol->factor) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD analysis failed with status %d",chol->common->status);
333:     ISRestoreIndices(perm,&ip);
334:   } else {
335:     chol->factor = cholmod_X_analyze(&cholA,chol->common);
336:     if (!chol->factor) SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_LIB,"CHOLMOD analysis failed with status %d",chol->common->status);
337:   }

339:   if (aijalloc) {PetscFree3(cholA.p,cholA.i,cholA.x);}

341:   F->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_CHOLMOD;
342:   return(0);
343: }
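
/*
   A minimal sketch of how the routines above are reached through PETSc's
   generic factorization interface (error checking omitted; assumes an
   assembled SeqSBAIJ matrix A, vectors b and x, and an ordering IS perm,
   e.g. obtained from MatGetOrdering()):

     Mat           F;
     MatFactorInfo info;
     MatFactorInfoInitialize(&info);
     MatGetFactor(A,MATSOLVERCHOLMOD,MAT_FACTOR_CHOLESKY,&F);
     MatCholeskyFactorSymbolic(F,A,perm,&info);   (calls the routine above)
     MatCholeskyFactorNumeric(F,A,&info);         (calls cholmod_X_factorize)
     MatSolve(F,b,x);                             (calls cholmod_X_solve)

   MatCholeskyFactorNumeric() and MatSolve() can then be repeated for new
   matrices with the same nonzero pattern and new right-hand sides.
*/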

345: static PetscErrorCode MatFactorGetSolverPackage_seqsbaij_cholmod(Mat A,const MatSolverPackage *type)
346: {
348:   *type = MATSOLVERCHOLMOD;
349:   return(0);
350: }

352: /*MC
353:   MATSOLVERCHOLMOD = "cholmod" - A matrix type providing direct solvers (Cholesky) for sequential matrices
354:   via the external package CHOLMOD.

356:   Use ./configure --download-suitesparse to build PETSc with CHOLMOD support

358:   Use -pc_type cholesky -pc_factor_mat_solver_package cholmod to use this direct solver

360:   Consult CHOLMOD documentation for more information about the Common parameters
361:   which correspond to the options database keys below.

363:   Options Database Keys:
364: + -mat_cholmod_dbound <0>          - Minimum absolute value of diagonal entries of D (None)
365: . -mat_cholmod_grow0 <1.2>         - Global growth ratio when factors are modified (None)
366: . -mat_cholmod_grow1 <1.2>         - Column growth ratio when factors are modified (None)
367: . -mat_cholmod_grow2 <5>           - Affine column growth constant when factors are modified (None)
368: . -mat_cholmod_maxrank <8>         - Max rank of update, larger values are faster but use more memory [2,4,8] (None)
369: . -mat_cholmod_factor <AUTO>       - (choose one of) SIMPLICIAL AUTO SUPERNODAL
370: . -mat_cholmod_supernodal_switch <40> - flop/nnz_L threshold for switching to supernodal factorization (None)
371: . -mat_cholmod_final_asis <TRUE>   - Leave factors "as is" (None)
372: . -mat_cholmod_final_pack <TRUE>   - Pack the columns when finished (use FALSE if the factors will be updated later) (None)
373: . -mat_cholmod_zrelax <0.8>        - 3 real supernodal relaxed amalgamation parameters (None)
374: . -mat_cholmod_nrelax <4>          - 3 size_t supernodal relaxed amalgamation parameters (None)
375: . -mat_cholmod_prefer_upper <TRUE> - Work with upper triangular form (faster when using fill-reducing ordering, slower in natural ordering) (None)
376: - -mat_cholmod_print <3>           - Verbosity level (None)

378:    Level: beginner

380:    Note: CHOLMOD is part of SuiteSparse http://faculty.cse.tamu.edu/davis/suitesparse.html

382: .seealso: PCCHOLESKY, PCFactorSetMatSolverPackage(), MatSolverPackage
383: M*/
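
/*
   Example usage (a sketch; "ex2" stands for any application that assembles a
   symmetric matrix and solves with KSP):

     ./ex2 -ksp_type preonly -pc_type cholesky \
           -pc_factor_mat_solver_package cholmod \
           -mat_cholmod_factor SUPERNODAL -mat_cholmod_print 3 -ksp_view

   or, selecting the package from code before KSPSetUp():

     PCSetType(pc,PCCHOLESKY);
     PCFactorSetMatSolverPackage(pc,MATSOLVERCHOLMOD);
*/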

385: PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_cholmod(Mat A,MatFactorType ftype,Mat *F)
386: {
387:   Mat            B;
388:   Mat_CHOLMOD    *chol;
390:   PetscInt       m=A->rmap->n,n=A->cmap->n,bs;

393:   if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"CHOLMOD cannot do %s factorization with SBAIJ, only %s",
394:                                              MatFactorTypes[ftype],MatFactorTypes[MAT_FACTOR_CHOLESKY]);
395:   MatGetBlockSize(A,&bs);
396:   if (bs != 1) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"CHOLMOD only supports block size=1, given %D",bs);
397:   /* Create the factorization matrix F */
398:   MatCreate(PetscObjectComm((PetscObject)A),&B);
399:   MatSetSizes(B,PETSC_DECIDE,PETSC_DECIDE,m,n);
400:   PetscStrallocpy("cholmod",&((PetscObject)B)->type_name);
401:   MatSetUp(B);
402:   PetscNewLog(B,&chol);

404:   chol->Wrap    = MatWrapCholmod_seqsbaij;
405:   B->data       = chol;

407:   B->ops->getinfo                = MatGetInfo_External;
408:   B->ops->view                   = MatView_CHOLMOD;
409:   B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_CHOLMOD;
410:   B->ops->destroy                = MatDestroy_CHOLMOD;
411:   PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_seqsbaij_cholmod);
412:   B->factortype                  = MAT_FACTOR_CHOLESKY;
413:   B->assembled                   = PETSC_TRUE; /* required by -ksp_view */
414:   B->preallocated                = PETSC_TRUE;

416:   CholmodStart(B);

418:   PetscFree(B->solvertype);
419:   PetscStrallocpy(MATSOLVERCHOLMOD,&B->solvertype);

421:   *F   = B;
422:   return(0);
423: }