Actual source code: pastix.c

/*
  Provides an interface to the PaStiX sparse solver
 */
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_USE_COMPLEX)
  #define _H_COMPLEX
#endif

EXTERN_C_BEGIN
#include <pastix.h>
EXTERN_C_END

#if defined(PETSC_USE_COMPLEX)
  #if defined(PETSC_USE_REAL_SINGLE)
    #define PASTIX_CALL c_pastix
  #else
    #define PASTIX_CALL z_pastix
  #endif
#else /* PETSC_USE_COMPLEX */
  #if defined(PETSC_USE_REAL_SINGLE)
    #define PASTIX_CALL s_pastix
  #else
    #define PASTIX_CALL d_pastix
  #endif
#endif /* PETSC_USE_COMPLEX */

#define PetscPastixCall(...) \
  do { \
    PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); \
    PetscStackCallExternalVoid(PetscStringize(PASTIX_CALL), PASTIX_CALL(__VA_ARGS__)); \
    PetscCall(PetscFPTrapPop()); \
  } while (0)
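
/*
  For example, in a complex double-precision build (PETSC_USE_COMPLEX without
  PETSC_USE_REAL_SINGLE) a call such as PetscPastixCall(args...) expands roughly to

    PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
    PetscStackCallExternalVoid("z_pastix", z_pastix(args...));
    PetscCall(PetscFPTrapPop());

  so floating-point traps are disabled around the external call, presumably because
  PaStiX may raise floating-point exceptions that PETSc would otherwise trap.
*/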

typedef PetscScalar PastixScalar;

typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;       /* PaStiX data storage structure */
  MatStructure   matstruc;
  PetscInt       n;                 /* Number of columns in the matrix */
  PetscInt      *colptr;            /* Index of first element of each column in row and val */
  PetscInt      *row;               /* Row of each element of the matrix */
  PetscScalar   *val;               /* Value of each element of the matrix */
  PetscInt      *perm;              /* Permutation array */
  PetscInt      *invp;              /* Inverse permutation array */
  PetscScalar   *rhs;               /* Right-hand side */
  PetscInt       rhsnbr;            /* Number of right-hand sides (must be 1) */
  PetscInt       iparm[IPARM_SIZE]; /* Integer parameters */
  double         dparm[DPARM_SIZE]; /* Floating-point parameters */
  MPI_Comm       pastix_comm;       /* PaStiX MPI communicator */
  PetscMPIInt    commRank;          /* MPI rank */
  PetscMPIInt    commSize;          /* MPI communicator size */
  PetscBool      CleanUpPastix;     /* Boolean indicating if we call the PaStiX clean step */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
} Mat_Pastix;

extern PetscErrorCode MatDuplicate_Pastix(Mat, MatDuplicateOption, Mat *);

/*
  Convert a PETSc SeqAIJ matrix to CSC format: colptr[n], row[nz], val[nz]

  input:
    A       - matrix in seqaij or mpisbaij (bs=1) format
    valOnly - FALSE: arrays are allocated and values are set for the CSC
              TRUE:  only fill the values
  output:
    n      - Size of the matrix
    colptr - Index of first element of each column in row and val
    row    - Row of each element of the matrix
    values - Value of each element of the matrix
 */
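
/*
  A small worked example (illustrative only, not from the source): the symmetric
  3x3 SeqAIJ matrix

      [ 2 1 0 ]
      [ 1 3 4 ]
      [ 0 4 5 ]

  is stored row-wise as rowptr = {0,2,5,7}, col = {0,1,0,1,2,1,2}, val = {2,1,1,3,4,4,5}.
  Since it is symmetric (and not SBAIJ), only the lower triangle is kept, so
  nnz = (7 - 3)/2 + 3 = 5, and the 1-based CSC output is

      colptr = {1, 3, 5, 6}
      row    = {1, 2,  2, 3,  3}
      values = {2, 1,  3, 4,  5}
*/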
static PetscErrorCode MatConvertToCSC(Mat A, PetscBool valOnly, PetscInt *n, PetscInt **colptr, PetscInt **row, PetscScalar **values)
{
  Mat_SeqAIJ  *aa      = (Mat_SeqAIJ *)A->data;
  PetscInt    *rowptr  = aa->i;
  PetscInt    *col     = aa->j;
  PetscScalar *rvalues = aa->a;
  PetscInt     m       = A->rmap->N;
  PetscInt     nnz;
  PetscInt     i, j, k;
  PetscInt     base = 1;
  PetscInt     idx;
  PetscInt     colidx;
  PetscInt    *colcount;
  PetscBool    isSBAIJ;
  PetscBool    isSeqSBAIJ;
  PetscBool    isMpiSBAIJ;
  PetscBool    isSym;

  PetscFunctionBegin;
  PetscCall(MatIsSymmetric(A, 0.0, &isSym));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSBAIJ, &isSBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPISBAIJ, &isMpiSBAIJ));

  *n = A->cmap->N;

  /* PaStiX only needs the triangular part if the matrix is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n) / 2 + *n;
  else nnz = aa->nz;

  if (!valOnly) {
    PetscCall(PetscMalloc1((*n) + 1, colptr));
    PetscCall(PetscMalloc1(nnz, row));
    PetscCall(PetscMalloc1(nnz, values));

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscCall(PetscArraycpy(*colptr, rowptr, (*n) + 1));
      for (i = 0; i < *n + 1; i++) (*colptr)[i] += base;
      PetscCall(PetscArraycpy(*row, col, nnz));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscCall(PetscArraycpy(*values, rvalues, nnz));
    } else {
      PetscCall(PetscMalloc1(*n, &colcount));

      for (i = 0; i < m; i++) colcount[i] = 0;
      /* Fill in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j + 1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscCall(PetscFree(colcount));
    }
  } else {
    /* Fill in the values only */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
            if (((*row)[k] - base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* data structure of the sparse matrix has changed */
          PetscCheck(k != (*colptr)[colidx + 1] - base, PETSC_COMM_SELF, PETSC_ERR_PLIB, "overflow on k %" PetscInt_FMT, k);
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Call the clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix.
 */
static PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix *lu = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  if (lu->CleanUpPastix) {
    /* Terminate the PaStiX instance and deallocate its memory */
    PetscCall(VecScatterDestroy(&lu->scat_rhs));
    PetscCall(VecDestroy(&lu->b_seq));
    PetscCall(VecScatterDestroy(&lu->scat_sol));

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PetscPastixCall(&lu->pastix_data, lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in destroy: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
    PetscCall(PetscFree(lu->colptr));
    PetscCall(PetscFree(lu->row));
    PetscCall(PetscFree(lu->val));
    PetscCall(PetscFree(lu->perm));
    PetscCall(PetscFree(lu->invp));
    PetscCallMPI(MPI_Comm_free(&lu->pastix_comm));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Gather the right-hand side.
  Call the solve step.
  Scatter the solution.
 */
static PetscErrorCode MatSolve_PaStiX(Mat A, Vec b, Vec x)
{
  Mat_Pastix  *lu = (Mat_Pastix *)A->data;
  PetscScalar *array;
  Vec          x_seq;

  PetscFunctionBegin;
  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs. Scatter b into a sequential rhs vector */
    PetscCall(VecScatterBegin(lu->scat_rhs, b, x_seq, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(lu->scat_rhs, b, x_seq, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(x_seq, &array));
  } else { /* size == 1 */
    PetscCall(VecCopy(b, x));
    PetscCall(VecGetArray(x, &array));
  }
  /* PaStiX solves in place: the array stays checked out until after the solve,
     when the solution has been written into it */
  lu->rhs = array;

  /* solve phase */
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  PetscPastixCall(&lu->pastix_data, lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
  PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    PetscCall(VecRestoreArray(x, &lu->rhs));
  } else {
    PetscCall(VecRestoreArray(x_seq, &lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc mpi x */
    PetscCall(VecScatterBegin(lu->scat_sol, x_seq, x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(lu->scat_sol, x_seq, x, INSERT_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Numeric factorization using the PaStiX solver.
 */
static PetscErrorCode MatFactorNumeric_PaStiX(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)(F)->data;
  Mat        *tseq;
  PetscInt    icntl;
  PetscInt    M = A->rmap->N;
  PetscBool   valOnly, flg, isSym;
  IS          is_iden;
  Vec         b;
  IS          isrow;
  PetscBool   isSeqAIJ, isSeqSBAIJ, isMPIAIJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJ, &isMPIAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)A), &lu->pastix_comm));
    PetscCallMPI(MPI_Comm_rank(lu->pastix_comm, &lu->commRank));
    PetscCallMPI(MPI_Comm_size(lu->pastix_comm, &lu->commSize));

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call PaStiX to set the default options */
    PetscPastixCall(&lu->pastix_data, lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in MatFactorNumeric: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);

    PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "PaStiX Options", "Mat");
    icntl                    = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscCall(PetscOptionsInt("-mat_pastix_verbose", "iparm[IPARM_VERBOSE] : level of printing (0 to 2)", "None", lu->iparm[IPARM_VERBOSE], &icntl, &flg));
    if ((flg && icntl >= 0) || PetscLogPrintInfo) lu->iparm[IPARM_VERBOSE] = icntl;
    icntl = -1;
    PetscCall(PetscOptionsInt("-mat_pastix_threadnbr", "iparm[IPARM_THREAD_NBR] : number of threads per MPI process", "None", lu->iparm[IPARM_THREAD_NBR], &icntl, &flg));
    if (flg && icntl > 0) lu->iparm[IPARM_THREAD_NBR] = icntl;
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscCall(PetscFree(lu->colptr));
      PetscCall(PetscFree(lu->row));
      PetscCall(PetscFree(lu->val));
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert the mpi A to a seq mat A */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
  PetscCall(MatCreateSubMatrices(A, 1, &isrow, &isrow, MAT_INITIAL_MATRIX, &tseq));
  PetscCall(ISDestroy(&isrow));

  PetscCall(MatConvertToCSC(*tseq, valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val));
  PetscCall(MatIsSymmetric(*tseq, 0.0, &isSym));
  PetscCall(MatDestroyMatrices(1, &tseq));

  if (!lu->perm) {
    PetscCall(PetscMalloc1(lu->n, &lu->perm));
    PetscCall(PetscMalloc1(lu->n, &lu->invp));
  }

  if (isSym) {
    /* On a symmetric matrix, LDLt */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On an unsymmetric matrix, LU */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ) && !lu->b_seq) {
      /* PaStiX only supports a centralized rhs. Create the scatter scat_rhs for repeated use in MatSolve() */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, A->cmap->N, &lu->b_seq));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, &is_iden));
      PetscCall(MatCreateVecs(A, NULL, &b));
      PetscCall(VecScatterCreate(b, is_iden, lu->b_seq, is_iden, &lu->scat_rhs));
      PetscCall(VecScatterCreate(lu->b_seq, is_iden, b, is_iden, &lu->scat_sol));
      PetscCall(ISDestroy(&is_iden));
      PetscCall(VecDestroy(&b));
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PetscPastixCall(&lu->pastix_data, lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in analysis and factorization phases: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PetscPastixCall(&lu->pastix_data, lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in factorization phase: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
  }

  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
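
/*
  Example (hypothetical command line): combining the options registered above with
  the factorization interface,

    ./app -pc_type lu -pc_factor_mat_solver_type pastix -mat_pastix_verbose 1 -mat_pastix_threadnbr 4

  selects PaStiX as the LU solver, raises the PaStiX print level to 1, and runs
  4 threads per MPI process.
*/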

/* Note: the PETSc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_YES;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F, Mat A, IS r, const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)(F)->data;

  PetscFunctionBegin;
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LLT;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_PaStiX(Mat A, PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu = (Mat_Pastix *)A->data;

      PetscCall(PetscViewerASCIIPrintf(viewer, "PaStiX run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Matrix type : %s \n", ((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric")));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Level of printing (0,1,2): %" PetscInt_FMT " \n", lu->iparm[IPARM_VERBOSE]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of refinement iterations : %" PetscInt_FMT " \n", lu->iparm[IPARM_NBITER]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Error : %g \n", lu->dparm[DPARM_RELATIVE_ERROR]));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATSOLVERPASTIX - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.

  Use `./configure` `--download-pastix` `--download-ptscotch` to have PETSc installed with PaStiX

  Use `-pc_type lu` `-pc_factor_mat_solver_type pastix` to use this direct solver

  Options Database Keys:
+ -mat_pastix_verbose <0,1,2> - print level of information messages from PaStiX
- -mat_pastix_threadnbr <integer> - Set the number of threads for each MPI process

  Note:
  This only works for matrices with a symmetric nonzero structure; if you pass it a matrix with a
  nonsymmetric structure, PaStiX (and hence PETSc) returns an error.

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
M*/
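
/*
  A minimal sketch (not part of this file) of selecting PaStiX from user code instead
  of the options database; it assumes an already assembled AIJ matrix A and vectors b, x:

    KSP ksp;
    PC  pc;

    PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPSetType(ksp, KSPPREONLY)); // apply the (direct) preconditioner exactly once
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCLU));
    PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERPASTIX));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPDestroy(&ksp));
*/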

static PetscErrorCode MatGetInfo_PaStiX(Mat A, MatInfoType flag, MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorGetSolverType_pastix(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERPASTIX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  The seq and mpi versions of this function are the same
 */
static PetscErrorCode MatGetFactor_seqaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->trivialsymbolic       = PETSC_TRUE;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void *)pastix;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}
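
/*
  At the Mat level, the factor returned by these routines is driven as follows
  (a sketch assuming an assembled SeqAIJ matrix A and vectors b, x; recall that
  the r and c permutations are ignored by this interface):

    Mat           F;
    MatFactorInfo info;
    IS            rowperm, colperm;

    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetFactor(A, MATSOLVERPASTIX, MAT_FACTOR_LU, &F));
    PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
    PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info)); // trivial here, see B->trivialsymbolic
    PetscCall(MatLUFactorNumeric(F, A, &info));                    // MatFactorNumeric_PaStiX
    PetscCall(MatSolve(F, b, x));                                  // MatSolve_PaStiX
    PetscCall(ISDestroy(&rowperm));
    PetscCall(ISDestroy(&colperm));
    PetscCall(MatDestroy(&F));
*/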

static PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->trivialsymbolic       = PETSC_TRUE;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void *)pastix;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->trivialsymbolic             = PETSC_TRUE;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void *)pastix;
  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  B->ops->destroy                = MatDestroy_Pastix;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->data               = (void *)pastix;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Pastix(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPISBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_mpisbaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_seqsbaij_pastix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
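
/*
  PETSc invokes this registration routine when configured with PaStiX; afterwards
  MatGetFactor(A, MATSOLVERPASTIX, MAT_FACTOR_LU, &F) (or MAT_FACTOR_CHOLESKY for
  SBAIJ matrices) dispatches to the corresponding MatGetFactor_*_pastix function above.
*/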