/*
   superlu_dist.c  (petsc-3.13.6, 2020-09-29)

   Provides an interface to the SuperLU_DIST sparse solver
*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscpkg_version.h>

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(6,3,0)
#define LUstructInit zLUstructInit
#define ScalePermstructInit zScalePermstructInit
#define ScalePermstructFree zScalePermstructFree
#define LUstructFree zLUstructFree
#define Destroy_LU zDestroy_LU
#define ScalePermstruct_t zScalePermstruct_t
#define LUstruct_t zLUstruct_t
#define SOLVEstruct_t zSOLVEstruct_t
#endif
#else
#include <superlu_ddefs.h>
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(6,3,0)
#define LUstructInit dLUstructInit
#define ScalePermstructInit dScalePermstructInit
#define ScalePermstructFree dScalePermstructFree
#define LUstructFree dLUstructFree
#define Destroy_LU dDestroy_LU
#define ScalePermstruct_t dScalePermstruct_t
#define LUstruct_t dLUstruct_t
#define SOLVEstruct_t dSOLVEstruct_t
#endif
#endif
EXTERN_C_END

typedef struct {
  int_t                  nprow,npcol,*row,*col;  /* process grid dimensions; CSR row pointers and column indices handed to SuperLU_DIST */
  gridinfo_t             grid;                   /* SuperLU_DIST process grid */
  superlu_dist_options_t options;
  SuperMatrix            A_sup;                  /* matrix in SuperLU_DIST SLU_NR_loc (distributed compressed row) format */
  ScalePermstruct_t      ScalePermstruct;        /* row/column scalings and permutations */
  LUstruct_t             LUstruct;               /* the L and U factors */
  int                    StatPrint;
  SOLVEstruct_t          SOLVEstruct;            /* communication pattern used by the triangular solves */
  fact_t                 FactPattern;            /* how repeated numeric factorizations reuse the previous pattern */
  MPI_Comm               comm_superlu;           /* duplicated communicator passed to SuperLU_DIST */
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;                   /* numerical values in CSR order */
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled; /* track whether SOLVEstruct was set up by MatSolve() or MatMatSolve() */
  PetscBool              CleanUpSuperLU_Dist;    /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;

#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
#else
  PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
#endif
  return(0);
}

PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
{
  PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));
  return(0);
}

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
    if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    MPI_Comm_free(&(lu->comm_superlu));
  }
  PetscFree(A->data);
  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);
  return(0);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscInt         m=A->rmap->n;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr=NULL;
  int              info;   /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);

  if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
    /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  VecCopy(b_mpi,x);
  VecGetArray(x,&bptr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,1,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,1,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PetscStackCall("SuperLU_DIST:PStatPrint",PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  VecRestoreArray(x,&bptr);
  lu->matsolve_iscalled    = PETSC_TRUE;
  lu->matmatsolve_iscalled = PETSC_FALSE;
  return(0);
}

static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscInt         m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info;   /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  if (X != B_mpi) {
    PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
    if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
  }

  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was most likely built for a single
       right-hand side in MatSolve(), not for MatMatSolve(), so destroy it here and
       let a new SOLVEstruct be created; reusing it may result in memory corruption
       or an incorrect solution. See src/mat/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  if (X != B_mpi) {
    MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);
  }

  MatGetSize(B_mpi,NULL,&nrhs);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  MatDenseGetArray(X,&bptr);

#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif

  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  MatDenseRestoreArray(X,&bptr);

  if (lu->options.PrintStat) PetscStackCall("SuperLU_DIST:PStatPrint",PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  return(0);
}

/*
  input:
    F:     numeric Cholesky factor
  output:
    nneg:  total number of negative pivots
    nzero: total number of zero pivots
    npos:  (global dimension of F) - nneg - nzero
*/
static PetscErrorCode MatGetInertia_SuperLU_DIST(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscScalar      *diagU=NULL;
  PetscInt         M,i,neg=0,zero=0,pos=0;
  PetscReal        r;

  if (!F->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix factor F is not assembled");
  if (lu->options.RowPerm != NOROWPERM) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set NOROWPERM");
  MatGetSize(F,&M,NULL);
  PetscMalloc1(M,&diagU);
  MatSuperluDistGetDiagU(F,diagU);
  for (i=0; i<M; i++) {
#if defined(PETSC_USE_COMPLEX)
    r = PetscImaginaryPart(diagU[i])/10.0;
    if (r< -PETSC_MACHINE_EPSILON || r>PETSC_MACHINE_EPSILON) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"diagU[%d]=%g + i %g is non-real",i,PetscRealPart(diagU[i]),r*10.0);
    r = PetscRealPart(diagU[i]);
#else
    r = diagU[i];
#endif
    if (r > 0) {
      pos++;
    } else if (r < 0) {
      neg++;
    } else zero++;
  }

  PetscFree(diagU);
  if (nneg)  *nneg  = neg;
  if (nzero) *nzero = zero;
  if (npos)  *npos  = pos;
  return(0);
}
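
/*
   Illustrative usage sketch (not part of the original file; object names are
   hypothetical): the inertia computed above is reached through the generic
   MatGetInertia() interface on a factored matrix, e.g.

     Mat      F;
     PetscInt nneg,nzero,npos;
     PCFactorGetMatrix(pc,&F);
     MatGetInertia(F,&nneg,&nzero,&npos);

   This requires that the original matrix was flagged symmetric or Hermitian
   (see MatLUFactorSymbolic_SuperLU_DIST() below, which only then sets
   F->ops->getinertia) and that row permutation is disabled, e.g. with
   -mat_superlu_dist_rowperm NOROWPERM.
*/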

static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST  *lu = (Mat_SuperLU_DIST*)F->data;
  Mat               Aloc;
  const PetscScalar *av;
  const PetscInt    *ai=NULL,*aj=NULL;
  PetscInt          nz,dummy;
  int               sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  SuperLUStat_t     stat;
  double            *berr=0;
  PetscBool         ismpiaij,isseqaij,flg;
  PetscErrorCode    ierr;

  PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
  PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
  if (ismpiaij) {
    MatMPIAIJGetLocalMat(A,MAT_INITIAL_MATRIX,&Aloc);
  } else if (isseqaij) {
    PetscObjectReference((PetscObject)A);
    Aloc = A;
  } else SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not for type %s",((PetscObject)A)->type_name);

  MatGetRowIJ(Aloc,0,PETSC_FALSE,PETSC_FALSE,&dummy,&ai,&aj,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"GetRowIJ failed");
  MatSeqAIJGetArrayRead(Aloc,&av);
  nz = ai[Aloc->rmap->n];

  /* Allocations for A_sup */
  if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#endif
  } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
    if (lu->FactPattern == SamePattern_SameRowPerm) {
      lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
    } else if (lu->FactPattern == SamePattern) {
      PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->rmap->N, &lu->grid, &lu->LUstruct)); /* Deallocate L and U matrices. */
      lu->options.Fact = SamePattern;
    } else if (lu->FactPattern == DOFACT) {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->rmap->N, &lu->grid, &lu->LUstruct));
      lu->options.Fact = DOFACT;

#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"options.Fact must be one of SamePattern SamePattern_SameRowPerm DOFACT");
    }
  }

  /* Copy AIJ matrix to superlu_dist matrix */
  PetscArraycpy(lu->row,ai,Aloc->rmap->n+1);
  PetscArraycpy(lu->col,aj,nz);
  PetscArraycpy(lu->val,av,nz);
  MatRestoreRowIJ(Aloc,0,PETSC_FALSE,PETSC_FALSE,&dummy,&ai,&aj,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"RestoreRowIJ failed");
  MatSeqAIJRestoreArrayRead(Aloc,&av);
  MatDestroy(&Aloc);

  /* Create and setup A_sup */
  if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, A->rmap->N, A->cmap->N, nz, A->rmap->n, A->rmap->rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, A->rmap->N, A->cmap->N, nz, A->rmap->n, A->rmap->rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);
      } else if (sinfo > lu->A_sup.ncol) {
        /*
          number of bytes allocated when memory allocation
          failure occurred, plus A->ncol.
        */
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
        PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
  }

  if (lu->options.PrintStat) {
    PetscStackCall("SuperLU_DIST:PStatPrint",PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  F->assembled     = PETSC_TRUE;
  F->preallocated  = PETSC_TRUE;
  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  return(0);
}
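
/*
   Illustrative sketch (not part of the original file; names are hypothetical):
   the "successive numeric factorization" branches above are exercised when the
   same factor is refactored after the matrix values change but its nonzero
   pattern does not, e.g. in a KSP-driven loop such as

     KSPSetOperators(ksp,A,A);
     KSPSolve(ksp,b,x);            (first solve: full factorization, Fact == DOFACT)
     ... update the entries of A, keeping its nonzero pattern ...
     KSPSetOperators(ksp,A,A);
     KSPSolve(ksp,b,x);            (refactorization reuses the previous pattern)

   Which reuse strategy is taken (SamePattern, SamePattern_SameRowPerm, or DOFACT)
   is selected with -mat_superlu_dist_fact in MatGetFactor_aij_superlu_dist() below.
*/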

/* Note the Petsc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscInt         M = A->rmap->N,N=A->cmap->N;

  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  F->ops->getinertia      = NULL;

  if (A->symmetric || A->hermitian) F->ops->getinertia = MatGetInertia_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  MatLUFactorSymbolic_SuperLU_DIST(F,A,r,r,info);
  F->ops->choleskyfactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  return(0);
}

static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A,MatSolverType *type)
{
  *type = MATSOLVERSUPERLU_DIST;
  return(0);
}

static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->data;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);

  options = lu->options;
  PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
  PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
  PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
  PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
  PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);

  switch (options.RowPerm) {
  case NOROWPERM:
    PetscViewerASCIIPrintf(viewer," Row permutation NOROWPERM\n");
    break;
  case LargeDiag_MC64:
    PetscViewerASCIIPrintf(viewer," Row permutation LargeDiag_MC64\n");
    break;
  case LargeDiag_AWPM:
    PetscViewerASCIIPrintf(viewer," Row permutation LargeDiag_AWPM\n");
    break;
  case MY_PERMR:
    PetscViewerASCIIPrintf(viewer," Row permutation MY_PERMR\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
  }

  switch (options.ColPerm) {
  case NATURAL:
    PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");
    break;
  case MMD_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");
    break;
  case MMD_ATA:
    PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");
    break;
  /* Even though this is called METIS, the SuperLU_DIST code sets this by default if PARMETIS is defined, not METIS */
  case METIS_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");
    break;
  case PARMETIS:
    PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

  if (lu->FactPattern == SamePattern) {
    PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");
  } else if (lu->FactPattern == DOFACT) {
    PetscViewerASCIIPrintf(viewer," Repeated factorization DOFACT\n");
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
  }
  return(0);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      MatView_Info_SuperLU_DIST(A,viewer);
    }
  }
  return(0);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"NOROWPERM","LargeDiag_MC64","LargeDiag_AWPM","MY_PERMR"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
  PetscBool              set;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
  PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);
  MatSetUp(B);
  B->ops->getinfo = MatGetInfo_External;
  B->ops->view    = MatView_SuperLU_DIST;
  B->ops->destroy = MatDestroy_SuperLU_DIST;

  /* Set the default input options:
       options.Fact              = DOFACT;
       options.Equil             = YES;
       options.ParSymbFact       = NO;
       options.ColPerm           = METIS_AT_PLUS_A;
       options.RowPerm           = LargeDiag_MC64;
       options.ReplaceTinyPivot  = YES;
       options.IterRefine        = DOUBLE;
       options.Trans             = NOTRANS;
       options.SolveInitialized  = NO;   -- flags whether the communication pattern used by MatSolve() and MatMatSolve() has been created
       options.RefineInitialized = NO;
       options.PrintStat         = YES;
       options.SymPattern        = NO;
  */
  set_default_options_dist(&options);

  if (ftype == MAT_FACTOR_LU) {
    B->factortype = MAT_FACTOR_LU;
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  } else {
    B->factortype = MAT_FACTOR_CHOLESKY;
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SuperLU_DIST;
    options.SymPattern = YES;
  }

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);

  PetscNewLog(B,&lu);
  B->data = lu;

  MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  /* Default num of process columns and rows */
  lu->nprow = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->nprow) lu->nprow = 1;
  while (lu->nprow > 0) {
    lu->npcol = (int_t) (size/lu->nprow);
    if (size == lu->nprow * lu->npcol) break;
    lu->nprow--;
  }
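  /* Illustrative note (not in the original source): the loop above picks a near-square grid
     whose product is exactly the communicator size, e.g. 6 ranks give a 2 x 3 grid while
     7 ranks fall back to 1 x 7; the -mat_superlu_dist_r / -mat_superlu_dist_c options below
     can override this choice. */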

  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
  PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
  PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal to nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && !flg) options.Equil = NO;

  PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,4,rowperm[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = NOROWPERM;
      break;
    case 1:
      options.RowPerm = LargeDiag_MC64;
      break;
    case 2:
      options.RowPerm = LargeDiag_AWPM;
      break;
    case 3:
      options.RowPerm = MY_PERMR;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
    }
  }

  PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
  if (set && flg && size>1) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
  }

  lu->FactPattern = SamePattern;
  PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_superlu_dist);
  PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
{
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
  return(0);
}

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_type superlu_dist to use this direct solver

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <NOROWPERM,LargeDiag_MC64,LargeDiag_AWPM,MY_PERMR> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm DOFACT
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

  Level: beginner

.seealso: PCLU

.seealso: PCFactorSetMatSolverType(), MatSolverType

M*/
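
/*
   Illustrative usage sketch (not part of the original file; ksp is a hypothetical,
   already configured KSP):

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverType(pc,MATSOLVERSUPERLU_DIST);
     KSPSetFromOptions(ksp);

   which corresponds to the command-line options
     -pc_type lu -pc_factor_mat_solver_type superlu_dist
*/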