Actual source code: pastix.c
/*
  Provides an interface to the PaStiX sparse solver
*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_USE_COMPLEX)
#define _H_COMPLEX
#endif

EXTERN_C_BEGIN
#include <pastix.h>
EXTERN_C_END

#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL c_pastix
#else
#define PASTIX_CALL z_pastix
#endif
#else /* PETSC_USE_COMPLEX */
#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL s_pastix
#else
#define PASTIX_CALL d_pastix
#endif
#endif /* PETSC_USE_COMPLEX */
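/*
   PASTIX_CALL selects the PaStiX entry point matching the precision and scalar
   type PETSc was configured with: s_pastix/d_pastix for single/double precision
   real builds and c_pastix/z_pastix for single/double precision complex builds.
   All call sites below therefore share one argument list regardless of precision.
*/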
typedef PetscScalar PastixScalar;

typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;       /* PaStiX data storage structure */
  MatStructure   matstruc;
  PetscInt       n;                 /* Number of columns in the matrix */
  PetscInt      *colptr;            /* Index of the first element of each column in row and val */
  PetscInt      *row;               /* Row of each element of the matrix */
  PetscScalar   *val;               /* Value of each element of the matrix */
  PetscInt      *perm;              /* Permutation array */
  PetscInt      *invp;              /* Inverse permutation array */
  PetscScalar   *rhs;               /* Right-hand side */
  PetscInt       rhsnbr;            /* Number of right-hand sides (must be 1) */
  PetscInt       iparm[IPARM_SIZE]; /* Integer parameters */
  double         dparm[DPARM_SIZE]; /* Floating point parameters */
  MPI_Comm       pastix_comm;       /* PaStiX MPI communicator */
  PetscMPIInt    commRank;          /* MPI rank */
  PetscMPIInt    commSize;          /* MPI communicator size */
  PetscBool      CleanUpPastix;     /* Indicates whether the PaStiX clean step must be called */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
} Mat_Pastix;
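/*
   The factor matrix keeps its own copy of the operator in compressed sparse
   column (CSC) form, together with the permutation arrays and the iparm/dparm
   parameter vectors, so that repeated numeric factorizations and solves can
   reuse them without rebuilding everything.
*/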
extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);

/*
  Convert a PETSc seqaij matrix to CSC: colptr[n], row[nz], val[nz]

  input:
    A       - matrix in seqaij or mpisbaij (bs=1) format
    valOnly - FALSE: arrays are allocated and filled for the CSC
              TRUE:  only the values are filled
  output:
    n       - size of the matrix
    colptr  - index of the first element of each column in row and val
    row     - row of each element of the matrix
    values  - value of each element of the matrix
*/
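/*
   Example (illustrative): with base = 1, the 3x3 unsymmetric matrix

       [ 4 0 1 ]
       [ 0 3 0 ]
       [ 1 0 2 ]

   is stored column by column as

       colptr = { 1, 3, 4, 6 }
       row    = { 1, 3, 2, 1, 3 }
       values = { 4, 1, 3, 1, 2 }

   that is, column j (0-based) occupies entries colptr[j]-1 .. colptr[j+1]-2 of
   row and values, since the stored indices are 1-based as PaStiX expects.
*/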
PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ  *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt    *rowptr  = aa->i;
  PetscInt    *col     = aa->j;
  PetscScalar *rvalues = aa->a;
  PetscInt     m       = A->rmap->N;
  PetscInt     nnz;
  PetscInt     i,j,k;
  PetscInt     base = 1;
  PetscInt     idx;
  PetscInt     colidx;
  PetscInt    *colcount;
  PetscBool    isSBAIJ;
  PetscBool    isSeqSBAIJ;
  PetscBool    isMpiSBAIJ;
  PetscBool    isSym;

  MatIsSymmetric(A,0.0,&isSym);
  PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs the triangular matrix if the matrix is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n)/2 + *n;
  else nnz = aa->nz;

  if (!valOnly) {
    PetscMalloc1((*n)+1,colptr);
    PetscMalloc1(nnz,row);
    PetscMalloc1(nnz,values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscArraycpy(*colptr,rowptr,(*n)+1);
      for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
      PetscArraycpy(*row,col,nnz);
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscArraycpy(*values,rvalues,nnz);
    } else {
      PetscMalloc1(*n,&colcount);

      for (i = 0; i < m; i++) colcount[i] = 0;
      /* Fill-in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }
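      /*
         At this point (*colptr)[j] holds the 1-based offset of column j, and
         colcount[j] has been reset to -base so that, in the next pass,
         (*colptr)[colidx] + colcount[colidx] is the 0-based position of the
         next free slot in column colidx.
      */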
      /* Fill-in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill-in only the values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx+1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* the data structure of the sparse matrix has changed */
          if (k == (*colptr)[colidx+1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }
  return(0);
}
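/*
   Illustrative usage (hypothetical variables): build the CSC copy once, then
   refresh only the numerical values while the nonzero pattern is unchanged.

     PetscInt     n,*colptr,*row;
     PetscScalar *val;
     MatConvertToCSC(A,PETSC_FALSE,&n,&colptr,&row,&val);   <- allocate and fill
     ...  matrix values change, nonzero pattern does not ...
     MatConvertToCSC(A,PETSC_TRUE,&n,&colptr,&row,&val);    <- refill the values only
*/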
/*
  Call the clean step of PaStiX if lu->CleanUpPastix == PETSC_TRUE.
  Free the CSC matrix.
*/
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix *lu = (Mat_Pastix*)A->data;

  if (lu->CleanUpPastix) {
    /* Terminate the instance and deallocate memory */
    VecScatterDestroy(&lu->scat_rhs);
    VecDestroy(&lu->b_seq);
    VecScatterDestroy(&lu->scat_sol);
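    /*
       Each PASTIX_CALL below executes the PaStiX tasks from
       iparm[IPARM_START_TASK] through iparm[IPARM_END_TASK]; here both are set
       to API_TASK_CLEAN so this single call only releases the solver's
       internal data.
    */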
    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in destroy: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  PetscFree(A->data);
  return(0);
}
/*
  Gather the right-hand side.
  Call the solve step.
  Scatter the solution.
*/
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix  *lu = (Mat_Pastix*)A->data;
  PetscScalar *array;
  Vec          x_seq;

  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs. Scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else { /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1) {
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }
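  /*
     lu->rhs now points at the data of the (sequential) right-hand-side vector;
     PaStiX overwrites that array in place with the solution, which is why the
     result is read back from x (uniprocessor case) or scattered from x_seq
     (parallel case) after the call below.
  */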
  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  PASTIX_CALL(&(lu->pastix_data),
              lu->pastix_comm,
              lu->n,
              lu->colptr,
              lu->row,
              (PastixScalar*)lu->val,
              lu->perm,
              lu->invp,
              (PastixScalar*)lu->rhs,
              lu->rhsnbr,
              lu->iparm,
              lu->dparm);
  if (lu->iparm[IPARM_ERROR_NUMBER] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc mpi x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}
/*
  Numeric factorization using the PaStiX solver.
*/
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix    *lu = (Mat_Pastix*)(F)->data;
  Mat           *tseq;
  PetscErrorCode ierr = 0;
  PetscInt       icntl;
  PetscInt       M = A->rmap->N;
  PetscBool      valOnly,flg,isSym;
  IS             is_iden;
  Vec            b;
  IS             isrow;
  PetscBool      isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call to set the default PaStiX options */
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in MatFactorNumeric: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"PaStiX Options","Mat");
    icntl = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : number of threads per MPI task","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscFree(lu->colptr);
      PetscFree(lu->row);
      PetscFree(lu->val);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }
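  /*
     First factorization (DIFFERENT_NONZERO_PATTERN): the CSC arrays are built
     from scratch below.  On a re-factorization with the same nonzero pattern,
     AIJ matrices are converted again (their CSC copy was just freed above),
     while SBAIJ matrices only have their numerical values refreshed (valOnly).
  */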
  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert the mpi A to a seq mat A */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatCreateSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(&isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  if (!lu->perm) {
    PetscMalloc1(lu->n,&(lu->perm));
    PetscMalloc1(lu->n,&(lu->invp));
  }

  if (isSym) {
    /* Symmetric matrix: LDL^t factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* Unsymmetric matrix: LU factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ) && !lu->b_seq) {
      /* PaStiX only supports a centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      MatCreateVecs(A,NULL,&b);
      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(&is_iden);
      VecDestroy(&b);
    }
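    /*
       The first factorization runs every PaStiX task from API_TASK_ORDERING
       through API_TASK_NUMFACT (ordering, symbolic analysis and numeric
       factorization) in a single call; later factorizations with the same
       nonzero pattern rerun only API_TASK_NUMFACT in the else branch below.
    */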
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in numeric factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}
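/*
   The two symbolic-factorization routines below only record the requested
   factorization type and mark the nonzero pattern as new; the actual ordering
   and analysis are performed by MatFactorNumeric_PaStiX() at the first numeric
   factorization.
*/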
/* Note that the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)F->data;

  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_YES;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}

/* Note that the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)(F)->data;

  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_NO;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}
PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu = (Mat_Pastix*)A->data;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type : %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2): %d \n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations : %d \n",lu->iparm[IPARM_NBITER]);
      PetscPrintf(PETSC_COMM_SELF,"  Error : %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}
/*MC
  MATSOLVERPASTIX - A solver package providing direct solvers (LU) for distributed
  and sequential matrices via the external package PaStiX.

  Use ./configure --download-pastix --download-ptscotch to have PETSc installed with PaStiX

  Use -pc_type lu -pc_factor_mat_solver_type pastix to use this direct solver

  Options Database Keys:
+ -mat_pastix_verbose <0,1,2>     - level of printing
- -mat_pastix_threadnbr <integer> - number of threads per MPI task

  Notes:
    This only works for matrices with a symmetric nonzero structure; if you pass it a matrix with a
    nonsymmetric structure, PaStiX (and hence PETSc) returns an error.

  Level: beginner

.seealso: PCFactorSetMatSolverType(), MatSolverType

M*/
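/*
   For example, an application (here called ./app, purely for illustration)
   could be run with

     mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_type pastix -mat_pastix_verbose 1 -mat_pastix_threadnbr 2

   to factor and solve with PaStiX at verbosity level 1, using 2 threads per MPI task.
*/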
PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)A->data;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

static PetscErrorCode MatFactorGetSolverType_pastix(Mat A,MatSolverType *type)
{
  *type = MATSOLVERPASTIX;
  return(0);
}

/*
  The seq and mpi versions of this function are the same
*/
static PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  PetscStrallocpy("pastix",&((PetscObject)B)->type_name);
  MatSetUp(B);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_pastix);

  B->factortype  = MAT_FACTOR_LU;
  B->useordering = PETSC_TRUE;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void*)pastix;

  *F = B;
  return(0);
}

static PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  PetscStrallocpy("pastix",&((PetscObject)B)->type_name);
  MatSetUp(B);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_pastix);

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void*)pastix;

  *F = B;
  return(0);
}

static PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  PetscStrallocpy("pastix",&((PetscObject)B)->type_name);
  MatSetUp(B);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_pastix);

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void*)pastix;

  *F = B;
  return(0);
}

static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  PetscStrallocpy("pastix",&((PetscObject)B)->type_name);
  MatSetUp(B);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  B->ops->destroy                = MatDestroy_Pastix;
  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_pastix);

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERPASTIX,&B->solvertype);

  PetscNewLog(B,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->data               = (void*)pastix;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Pastix(void)
{
  MatSolverTypeRegister(MATSOLVERPASTIX,MATMPIAIJ,  MAT_FACTOR_LU,      MatGetFactor_mpiaij_pastix);
  MatSolverTypeRegister(MATSOLVERPASTIX,MATSEQAIJ,  MAT_FACTOR_LU,      MatGetFactor_seqaij_pastix);
  MatSolverTypeRegister(MATSOLVERPASTIX,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_mpisbaij_pastix);
  MatSolverTypeRegister(MATSOLVERPASTIX,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_seqsbaij_pastix);
  return(0);
}
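/*
   A typical way to reach this solver from application code (a sketch; Amat, b
   and x are application-provided objects):

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,Amat,Amat);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverType(pc,MATSOLVERPASTIX);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);

   or, equivalently, select it at run time with
   -pc_type lu -pc_factor_mat_solver_type pastix.
*/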