Actual source code: pastix.c

#define PETSCMAT_DLL

/*
    Provides an interface to the PaStiX sparse solver
*/
#include "../src/mat/impls/aij/seq/aij.h"
#include "../src/mat/impls/aij/mpi/mpiaij.h"
#include "../src/mat/impls/sbaij/seq/sbaij.h"
#include "../src/mat/impls/sbaij/mpi/mpisbaij.h"

#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_STRING_H)
#include <string.h>
#endif

#include "pastix.h"
typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;              /* PaStiX data storage structure                        */
  MatStructure   matstruc;
  PetscInt       n;                        /* Number of columns in the matrix                      */
  PetscInt      *colptr;                   /* Index of first element of each column in row and val */
  PetscInt      *row;                      /* Row of each element of the matrix                    */
  PetscScalar   *val;                      /* Value of each element of the matrix                  */
  PetscInt      *perm;                     /* Permutation table                                    */
  PetscInt      *invp;                     /* Inverse permutation table                            */
  PetscScalar   *rhs;                      /* Right-hand-side member                               */
  PetscInt       rhsnbr;                   /* Number of right-hand sides (must be 1)               */
  PetscInt       iparm[64];                /* Integer parameters                                   */
  double         dparm[64];                /* Floating point parameters                            */
  MPI_Comm       pastix_comm;              /* PaStiX MPI communicator                              */
  PetscMPIInt    commRank;                 /* MPI rank                                             */
  PetscMPIInt    commSize;                 /* MPI communicator size                                */
  PetscTruth     CleanUpPastix;            /* Boolean indicating if we call the PaStiX clean step  */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
  PetscTruth     isAIJ;
  PetscErrorCode (*MatDestroy)(Mat);
} Mat_Pastix;
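
/*
   Added note (illustrative, not part of the original source): PaStiX is driven
   by setting iparm[IPARM_START_TASK]/iparm[IPARM_END_TASK] and calling pastix()
   repeatedly over consecutive slices of its task pipeline:

     API_TASK_INIT -> API_TASK_ORDERING -> API_TASK_SYMBFACT -> API_TASK_ANALYSE
                   -> API_TASK_NUMFACT  -> API_TASK_SOLVE    -> API_TASK_REFINE
                   -> API_TASK_CLEAN

   This file runs INIT first, then ORDERING..NUMFACT (or NUMFACT alone when the
   nonzero pattern is unchanged), SOLVE..REFINE in MatSolve_PaStiX(), and
   finally CLEAN in MatDestroy_Pastix().
*/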

EXTERN PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);

/*
   Convert a PETSc matrix to CSC format: colptr[n], row[nz], val[nz]

  input:
    A       - matrix in seqaij or seqsbaij (bs=1) format
    valOnly - FALSE: allocate the CSC arrays and fill both structure and values
              TRUE:  only fill the values (the structure is reused)
  output:
    n       - size of the matrix
    colptr  - index of the first element of each column in row and val
    row     - row of each element of the matrix
    values  - value of each element of the matrix
 */
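/*
   Illustration (added, not part of the original source): for the 3x3 matrix
   below, the 1-based CSC arrays produced by this routine would be

       | 4 1 0 |        n      = 3
   A = | 1 5 2 |        colptr = {1, 3, 6, 8}          first entry of each column
       | 0 2 6 |        row    = {1, 2, 1, 2, 3, 2, 3} row index of each entry
                        val    = {4, 1, 1, 5, 2, 2, 6} values, column by column

   With a symmetric input (and non-SBAIJ storage) only the lower triangular
   part, here {4, 1, 5, 2, 6}, would be kept.
*/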
PetscErrorCode MatConvertToCSC(Mat A,PetscTruth valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ     *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt       *rowptr  = aa->i;
  PetscInt       *col     = aa->j;
  PetscScalar    *rvalues = aa->a;
  PetscInt        m       = A->rmap->N;
  PetscInt        nnz;
  PetscInt        i,j,k;
  PetscInt        base = 1;
  PetscInt        idx;
  PetscErrorCode  ierr;
  PetscInt        colidx;
  PetscInt       *colcount;
  PetscTruth      isSBAIJ;
  PetscTruth      isSeqSBAIJ;
  PetscTruth      isMpiSBAIJ;
  PetscTruth      isSym;

  MatIsSymmetric(A,0.0,&isSym);
  PetscTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs the triangular part when the matrix is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) {
    nnz = (aa->nz - *n)/2 + *n;
  } else {
    nnz = aa->nz;
  }

  if (!valOnly) {
    PetscMalloc(((*n)+1)*sizeof(PetscInt),colptr);
    PetscMalloc(nnz*sizeof(PetscInt),row);
    PetscMalloc(nnz*sizeof(PetscScalar),values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscMemcpy(*colptr,rowptr,((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++)
        (*colptr)[i] += base;
      PetscMemcpy(*row,col,nnz*sizeof(PetscInt));
      for (i = 0; i < nnz; i++)
        (*row)[i] += base;
      PetscMemcpy(*values,rvalues,nnz*sizeof(PetscScalar));
    } else {
      PetscMalloc((*n)*sizeof(PetscInt),&colcount);

      for (i = 0; i < *n; i++) colcount[i] = 0;
      /* Fill in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx = col[j];
            idx    = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill in only the values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx+1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* data structure of sparse matrix has changed */
          if (k == (*colptr)[colidx+1] - base) SETERRQ1(PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }
  {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;
    tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar));if (!tmpvalues) SETERRQ(PETSC_ERR_MEM,"Unable to allocate memory");
    tmprows   = (PetscInt*)malloc(nnz*sizeof(PetscInt));      if (!tmprows)   SETERRQ(PETSC_ERR_MEM,"Unable to allocate memory");
    tmpcolptr = (PetscInt*)malloc((*n+1)*sizeof(PetscInt));   if (!tmpcolptr) SETERRQ(PETSC_ERR_MEM,"Unable to allocate memory");

    if (sizeof(PetscScalar) != sizeof(pastix_float_t)) {
      SETERRQ2(PETSC_ERR_SUP,"sizeof(PetscScalar) %d != sizeof(pastix_float_t) %d",sizeof(PetscScalar),sizeof(pastix_float_t));
    }
    if (sizeof(PetscInt) != sizeof(pastix_int_t)) {
      SETERRQ2(PETSC_ERR_SUP,"sizeof(PetscInt) %d != sizeof(pastix_int_t) %d",sizeof(PetscInt),sizeof(pastix_int_t));
    }

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    /* Have PaStiX check the matrix and correct it if needed */
    pastix_checkMatrix(MPI_COMM_WORLD,API_VERBOSE_NO,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,&tmpvalues,NULL,1);

    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscInt),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscScalar),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    free(tmpvalues);
    free(tmprows);
    free(tmpcolptr);
  }
  return(0);
}



/*
  Call the clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix.
 */
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix      *lu = (Mat_Pastix*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size = lu->commSize;

  if (lu->CleanUpPastix) {
    /* Terminate the PaStiX instance and deallocate its memory */
    if (size > 1) {
      VecScatterDestroy(lu->scat_rhs);
      VecDestroy(lu->b_seq);
      VecScatterDestroy(lu->scat_sol);
    }

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    pastix((pastix_data_t**)&(lu->pastix_data),
                             lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
                             lu->dparm);

    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  (lu->MatDestroy)(A);
  return(0);
}

/*
  Gather the right-hand side.
  Call the PaStiX solve step.
  Scatter the solution.
 */
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix     *lu = (Mat_Pastix*)A->spptr;
  PetscScalar    *array;
  Vec             x_seq;
  PetscErrorCode  ierr;

  lu->rhsnbr = 1;
  x_seq = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs: scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else {  /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1) {
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  pastix((pastix_data_t**)&(lu->pastix_data),
         (MPI_Comm)        lu->pastix_comm,
         (pastix_int_t)    lu->n,
         (pastix_int_t*)   lu->colptr,
         (pastix_int_t*)   lu->row,
         (pastix_float_t*) lu->val,
         (pastix_int_t*)   lu->perm,
         (pastix_int_t*)   lu->invp,
         (pastix_float_t*) lu->rhs,
         (pastix_int_t)    lu->rhsnbr,
         (pastix_int_t*)   lu->iparm,
         (double*)         lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
    SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  if (lu->commSize == 1) {
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc mpi x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}

/*
  Numeric factorization using the PaStiX solver.
 */
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix    *lu = (Mat_Pastix*)(F)->spptr;
  Mat           *tseq;
  PetscErrorCode ierr;
  PetscInt       icntl;
  PetscInt       M = A->rmap->N;
  PetscTruth     valOnly,flg,isSym;
  Mat            F_diag;
  IS             is_iden;
  Vec            b;
  IS             isrow;
  PetscTruth     isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
  PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(((PetscObject)A)->comm,&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;
    lu->rhsnbr = 1;

    /* Call PaStiX to set the default options */
    pastix((pastix_data_t**)&(lu->pastix_data),
           (MPI_Comm)        lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           (double*)         lu->dparm);

    PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"PaStiX Options","Mat");

    icntl = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl > 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : number of threads per MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscFree(lu->colptr);
      PetscFree(lu->row);
      PetscFree(lu->val);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert the mpi A to a seq mat A */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  if (!lu->perm) {
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->perm));
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->invp));
  }

  if (isSym) {
    /* Symmetric matrix: LDLt factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* Unsymmetric matrix: LU factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }
419: 
  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports a centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      VecCreate(((PetscObject)A)->comm,&b);
      VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
      VecSetFromOptions(b);

      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(is_iden);
      VecDestroy(b);
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    pastix((pastix_data_t**)&(lu->pastix_data),
           (MPI_Comm)        lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           (double*)         lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
      SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
    }
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    pastix((pastix_data_t**)&(lu->pastix_data),
           (MPI_Comm)        lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           (double*)         lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
      SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
    }
  }

  if (lu->commSize > 1) {
    if ((F)->factor == MAT_FACTOR_LU) {
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}

/* Note: the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)F->spptr;

  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}


/* Note: the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;

  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_YES;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}

PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscTruth        iascii;
  PetscViewerFormat format;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type :                      %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2):         %d \n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations :  %d \n",lu->iparm[IPARM_NBITER]);
      PetscViewerASCIIPrintf(viewer,"  Error :                            %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}


/*MC
     MAT_SOLVER_PASTIX  - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.

  Use config/configure.py --download-pastix to have PETSc installed with PaStiX

  Options Database Keys:
+ -mat_pastix_verbose   <0,1,2>   - print level
- -mat_pastix_threadnbr <integer> - set the number of threads per MPI task

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
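
/*
   A minimal usage sketch (added for illustration; not part of the original
   source). It assumes a PETSc build configured with --download-pastix and a
   previously assembled AIJ matrix A with vectors b and x:

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);                                 (PCCHOLESKY with SBAIJ)
     PCFactorSetMatSolverPackage(pc,MAT_SOLVER_PASTIX);
     KSPSetFromOptions(ksp);                             (reads -mat_pastix_* keys)
     KSPSolve(ksp,b,x);
     KSPDestroy(ksp);
*/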


PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MAT_SOLVER_PASTIX;
  return(0);
}

/*
    The seq and mpi versions of this function are the same
*/
PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) {
    SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  }
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor                = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}


PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
  MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor                = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) {
    SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  }
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);

  B->factor                      = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor                      = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}