Actual source code: superlu_dist.c
petsc-3.7.7 2017-09-25
2: /*
3: Provides an interface to the SuperLU_DIST_2.2 sparse solver
4: */
6: #include <../src/mat/impls/aij/seq/aij.h>
7: #include <../src/mat/impls/aij/mpi/mpiaij.h>
8: #if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */
9: #include <stdlib.h>
10: #endif
12: #if defined(PETSC_USE_64BIT_INDICES)
13: /* ugly SuperLU_Dist variable telling it to use long long int */
14: #define _LONGINT
15: #endif
17: EXTERN_C_BEGIN
18: #if defined(PETSC_USE_COMPLEX)
19: #include <superlu_zdefs.h>
20: #else
21: #include <superlu_ddefs.h>
22: #endif
23: EXTERN_C_END
25: /*
26: GLOBAL - The sparse matrix and right-hand side are initially stored entirely on process 0 (a more accurate name would be "centralized").
27: DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
28: */
29: typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
30: const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
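/* The input mode is chosen at runtime with -mat_superlu_dist_matinput (see MatGetFactor_aij_superlu_dist
   below): DISTRIBUTED is the default and is switched back to GLOBAL when there is only one process.
   A typical command line (the executable name is a placeholder):
     mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_package superlu_dist -mat_superlu_dist_matinput DISTRIBUTED */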
32: typedef struct {
33: int_t nprow,npcol,*row,*col;
34: gridinfo_t grid;
35: superlu_dist_options_t options;
36: SuperMatrix A_sup;
37: ScalePermstruct_t ScalePermstruct;
38: LUstruct_t LUstruct;
39: int StatPrint;
40: SuperLU_MatInputMode MatInputMode;
41: SOLVEstruct_t SOLVEstruct;
42: fact_t FactPattern;
43: MPI_Comm comm_superlu;
44: #if defined(PETSC_USE_COMPLEX)
45: doublecomplex *val;
46: #else
47: double *val;
48: #endif
49: PetscBool matsolve_iscalled,matmatsolve_iscalled;
50: PetscBool CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */
51: } Mat_SuperLU_DIST;
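/* Note: val, col and row above hold the matrix in SuperLU_DIST's own storage. In GLOBAL mode they are
   allocated by (z|d)CompRow_to_CompCol_dist() during numeric factorization; in DISTRIBUTED mode by
   (z|d)allocateA_dist(). They are released through Destroy_CompCol_Matrix_dist() or
   Destroy_CompRowLoc_Matrix_dist() in MatDestroy_SuperLU_DIST() below. */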
53: extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
54: extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
55: extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
56: extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
57: extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
58: extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
59: extern PetscErrorCode MatDestroy_MPIAIJ(Mat);
63: PetscErrorCode MatGetDiagonal_SuperLU_DIST(Mat A,Vec v)
64: {
66: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: SuperLU_DIST factor");
67: return(0);
68: }
72: PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
73: {
74: PetscErrorCode ierr;
75: Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
76: PetscBool flg;
79: if (lu && lu->CleanUpSuperLU_Dist) {
80: /* Deallocate SuperLU_DIST storage */
81: if (lu->MatInputMode == GLOBAL) {
82: PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
83: } else {
84: PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
85: if (lu->options.SolveInitialized) {
86: #if defined(PETSC_USE_COMPLEX)
87: PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
88: #else
89: PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
90: #endif
91: }
92: }
93: PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
94: PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
95: PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));
97: /* Release the SuperLU_DIST process grid. */
98: PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
99: MPI_Comm_free(&(lu->comm_superlu));
100: }
101: PetscFree(A->spptr);
103: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
104: if (flg) {
105: MatDestroy_SeqAIJ(A);
106: } else {
107: MatDestroy_MPIAIJ(A);
108: }
109: return(0);
110: }
114: PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
115: {
116: Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
117: PetscErrorCode ierr;
118: PetscMPIInt size;
119: PetscInt m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
120: SuperLUStat_t stat;
121: double berr[1];
122: PetscScalar *bptr;
123: PetscInt nrhs=1;
124: Vec x_seq;
125: IS iden;
126: VecScatter scat;
127: int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
128: static PetscBool cite = PETSC_FALSE;
131: PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);
132: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
133: if (size > 1 && lu->MatInputMode == GLOBAL) {
134: /* global mat input, convert b to x_seq */
135: VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);
136: ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);
137: VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);
138: ISDestroy(&iden);
140: VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
141: VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
142: VecGetArray(x_seq,&bptr);
143: } else { /* size==1 || distributed mat input */
144: if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
145: /* see comments in MatMatSolve() */
146: #if defined(PETSC_USE_COMPLEX)
147: PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
148: #else
149: PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
150: #endif
151: lu->options.SolveInitialized = NO;
152: }
153: VecCopy(b_mpi,x);
154: VecGetArray(x,&bptr);
155: }
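/* At this point bptr holds the right-hand side: in the GLOBAL path with more than one process it is a
   full sequential copy of length N gathered onto every process, otherwise it is the local part of x
   (which was overwritten with b). The p*gssvx calls below replace it with the solution in place. */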
157: if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
159: PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
160: if (lu->MatInputMode == GLOBAL) {
161: #if defined(PETSC_USE_COMPLEX)
162: PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
163: #else
164: PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
165: #endif
166: } else { /* distributed mat input */
167: #if defined(PETSC_USE_COMPLEX)
168: PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
169: #else
170: PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
171: #endif
172: }
173: if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
175: if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
176: PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
178: if (size > 1 && lu->MatInputMode == GLOBAL) {
179: /* convert seq x to mpi x */
180: VecRestoreArray(x_seq,&bptr);
181: VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
182: VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
183: VecScatterDestroy(&scat);
184: VecDestroy(&x_seq);
185: } else {
186: VecRestoreArray(x,&bptr);
188: lu->matsolve_iscalled = PETSC_TRUE;
189: lu->matmatsolve_iscalled = PETSC_FALSE;
190: }
191: return(0);
192: }
196: PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
197: {
198: Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
199: PetscErrorCode ierr;
200: PetscMPIInt size;
201: PetscInt M=A->rmap->N,m=A->rmap->n,nrhs;
202: SuperLUStat_t stat;
203: double berr[1];
204: PetscScalar *bptr;
205: int info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
206: PetscBool flg;
209: if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
210: PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
211: if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
212: PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
213: if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
215: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
216: if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
217: /* size==1 or distributed mat input */
218: if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
219: /* the communication pattern in SOLVEstruct was most likely not created for a matmatsolve,
220: so destroy it and let the solver build a new SOLVEstruct.
221: Otherwise memory corruption or an incorrect solution may result.
222: See src/mat/examples/tests/ex125.c */
223: #if defined(PETSC_USE_COMPLEX)
224: PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
225: #else
226: PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
227: #endif
228: lu->options.SolveInitialized = NO;
229: }
230: MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);
232: MatGetSize(B_mpi,NULL,&nrhs);
234: PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
235: MatDenseGetArray(X,&bptr);
236: if (lu->MatInputMode == GLOBAL) { /* size == 1 */
237: #if defined(PETSC_USE_COMPLEX)
238: PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
239: #else
240: PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
241: #endif
242: } else { /* distributed mat input */
243: #if defined(PETSC_USE_COMPLEX)
244: PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
245: #else
246: PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
247: #endif
248: }
249: if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
250: MatDenseRestoreArray(X,&bptr);
252: if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
253: PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
254: lu->matsolve_iscalled = PETSC_FALSE;
255: lu->matmatsolve_iscalled = PETSC_TRUE;
256: return(0);
257: }
262: PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
263: {
264: Mat *tseq,A_seq = NULL;
265: Mat_SeqAIJ *aa,*bb;
266: Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
267: PetscErrorCode ierr;
268: PetscInt M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
269: m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
270: int sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
271: PetscMPIInt size;
272: SuperLUStat_t stat;
273: double *berr=0;
274: IS isrow;
275: Mat F_diag=NULL;
276: #if defined(PETSC_USE_COMPLEX)
277: doublecomplex *av, *bv;
278: #else
279: double *av, *bv;
280: #endif
283: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
285: if (lu->MatInputMode == GLOBAL) { /* global mat input */
286: if (size > 1) { /* convert mpi A to seq mat A */
287: ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
288: MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
289: ISDestroy(&isrow);
291: A_seq = *tseq;
292: PetscFree(tseq);
293: aa = (Mat_SeqAIJ*)A_seq->data;
294: } else {
295: PetscBool flg;
296: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
297: if (flg) {
298: Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
299: A = At->A;
300: }
301: aa = (Mat_SeqAIJ*)A->data;
302: }
304: /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
305: Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
306: if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */
307: PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
308: if (lu->FactPattern == SamePattern_SameRowPerm) {
309: lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
310: } else { /* lu->FactPattern == SamePattern */
311: PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
312: lu->options.Fact = SamePattern;
313: }
314: }
315: #if defined(PETSC_USE_COMPLEX)
316: PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row));
317: #else
318: PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row));
319: #endif
321: /* Create compressed column matrix A_sup. */
322: #if defined(PETSC_USE_COMPLEX)
323: PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
324: #else
325: PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
326: #endif
327: } else { /* distributed mat input */
328: Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
329: aa=(Mat_SeqAIJ*)(mat->A)->data;
330: bb=(Mat_SeqAIJ*)(mat->B)->data;
331: ai=aa->i; aj=aa->j;
332: bi=bb->i; bj=bb->j;
333: #if defined(PETSC_USE_COMPLEX)
334: av=(doublecomplex*)aa->a;
335: bv=(doublecomplex*)bb->a;
336: #else
337: av=aa->a;
338: bv=bb->a;
339: #endif
340: rstart = A->rmap->rstart;
341: nz = aa->nz + bb->nz;
342: garray = mat->garray;
344: if (lu->options.Fact == DOFACT) { /* first numeric factorization */
345: #if defined(PETSC_USE_COMPLEX)
346: PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
347: #else
348: PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
349: #endif
350: } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
351: if (lu->FactPattern == SamePattern_SameRowPerm) {
352: lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
353: } else if (lu->FactPattern == SamePattern) {
354: PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
355: lu->options.Fact = SamePattern;
356: } else {
357: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"options.Fact must be one of SamePattern SamePattern_SameRowPerm");
358: }
359: }
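/* The loop below merges the diagonal (A) and off-diagonal (B) blocks of the MPIAIJ matrix into one
   local compressed-row structure with global column indices: for each row, off-diagonal entries that
   lie to the left of the diagonal block come first, then the diagonal-block entries shifted to global
   numbering by rstart, then the remaining off-diagonal entries, so columns appear in ascending order. */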
360: nz = 0;
361: for (i=0; i<m; i++) {
362: lu->row[i] = nz;
363: countA = ai[i+1] - ai[i];
364: countB = bi[i+1] - bi[i];
365: if (aj) {
366: ajj = aj + ai[i]; /* ptr to the beginning of this row */
367: } else {
368: ajj = NULL;
369: }
370: bjj = bj + bi[i];
372: /* B part, smaller col index */
373: if (aj) {
374: colA_start = rstart + ajj[0]; /* the smallest global col index of A */
375: } else { /* SuperLU_DIST does not require the matrix to have diagonal entries, so aj=NULL also works */
376: colA_start = rstart;
377: }
378: jB = 0;
379: for (j=0; j<countB; j++) {
380: jcol = garray[bjj[j]];
381: if (jcol > colA_start) {
382: jB = j;
383: break;
384: }
385: lu->col[nz] = jcol;
386: lu->val[nz++] = *bv++;
387: if (j==countB-1) jB = countB;
388: }
390: /* A part */
391: for (j=0; j<countA; j++) {
392: lu->col[nz] = rstart + ajj[j];
393: lu->val[nz++] = *av++;
394: }
396: /* B part, larger col index */
397: for (j=jB; j<countB; j++) {
398: lu->col[nz] = garray[bjj[j]];
399: lu->val[nz++] = *bv++;
400: }
401: }
402: lu->row[m] = nz;
404: if (lu->options.Fact == DOFACT) {
405: #if defined(PETSC_USE_COMPLEX)
406: PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
407: #else
408: PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
409: #endif
410: }
411: }
413: /* Factor the matrix. */
414: PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
415: if (lu->MatInputMode == GLOBAL) { /* global mat input */
416: #if defined(PETSC_USE_COMPLEX)
417: PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
418: #else
419: PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
420: #endif
421: } else { /* distributed mat input */
422: #if defined(PETSC_USE_COMPLEX)
423: PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
424: #else
425: PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
426: #endif
427: }
428:
429: if (sinfo > 0) {
430: if (A->erroriffailure) {
431: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
432: } else {
433: if (sinfo <= lu->A_sup.ncol) {
434: F->errortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
435: PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);
436: } else if (sinfo > lu->A_sup.ncol) {
437: /*
438: number of bytes allocated when memory allocation
439: failure occurred, plus A->ncol.
440: */
441: F->errortype = MAT_FACTOR_OUTMEMORY;
442: PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);
443: }
444: }
445: } else if (sinfo < 0) {
446: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
447: }
449: if (lu->MatInputMode == GLOBAL && size > 1) {
450: MatDestroy(&A_seq);
451: }
453: if (lu->options.PrintStat) {
454: PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
455: }
456: PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
457: if (size > 1) {
458: F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
459: F_diag->assembled = PETSC_TRUE;
460: }
461: (F)->assembled = PETSC_TRUE;
462: (F)->preallocated = PETSC_TRUE;
463: lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
464: return(0);
465: }
467: /* Note the Petsc r and c permutations are ignored */
470: PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
471: {
472: Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
473: PetscInt M = A->rmap->N,N=A->cmap->N;
476: /* Initialize the SuperLU process grid. */
477: PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));
479: /* Initialize ScalePermstruct and LUstruct. */
480: PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
481: PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
482: F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
483: F->ops->solve = MatSolve_SuperLU_DIST;
484: F->ops->matsolve = MatMatSolve_SuperLU_DIST;
485: lu->CleanUpSuperLU_Dist = PETSC_TRUE;
486: return(0);
487: }
491: static PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
492: {
494: *type = MATSOLVERSUPERLU_DIST;
495: return(0);
496: }
500: static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
501: {
502: Mat B;
503: Mat_SuperLU_DIST *lu;
504: PetscErrorCode ierr;
505: PetscInt M=A->rmap->N,N=A->cmap->N,indx;
506: PetscMPIInt size;
507: superlu_dist_options_t options;
508: PetscBool flg;
509: const char *colperm[] = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
510: const char *rowperm[] = {"LargeDiag","NATURAL"};
511: const char *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};
512: PetscBool set;
515: /* Create the factorization matrix */
516: MatCreate(PetscObjectComm((PetscObject)A),&B);
517: MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
518: MatSetType(B,((PetscObject)A)->type_name);
519: MatSeqAIJSetPreallocation(B,0,NULL);
520: MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);
522: B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
523: B->ops->view = MatView_SuperLU_DIST;
524: B->ops->destroy = MatDestroy_SuperLU_DIST;
525: B->ops->getdiagonal = MatGetDiagonal_SuperLU_DIST;
527: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);
529: B->factortype = MAT_FACTOR_LU;
531: /* set solvertype */
532: PetscFree(B->solvertype);
533: PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);
535: PetscNewLog(B,&lu);
536: B->spptr = lu;
538: /* Set the default input options:
539: options.Fact = DOFACT;
540: options.Equil = YES;
541: options.ParSymbFact = NO;
542: options.ColPerm = METIS_AT_PLUS_A;
543: options.RowPerm = LargeDiag;
544: options.ReplaceTinyPivot = YES;
545: options.IterRefine = DOUBLE;
546: options.Trans = NOTRANS;
547: options.SolveInitialized = NO; - holds the communication pattern used by MatSolve() and MatMatSolve()
548: options.RefineInitialized = NO;
549: options.PrintStat = YES;
550: */
551: set_default_options_dist(&options);
553: MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
554: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
555: /* Default num of process columns and rows */
556: lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
557: if (!lu->npcol) lu->npcol = 1;
558: while (lu->npcol > 0) {
559: lu->nprow = (int_t) (size/lu->npcol);
560: if (size == lu->nprow * lu->npcol) break;
561: lu->npcol--;
562: }
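/* For example, with size = 6 the loop above picks npcol = 2 and nprow = 3, i.e. a 3 x 2 process grid;
   -mat_superlu_dist_r and -mat_superlu_dist_c below override this choice, and the check that follows
   requires nprow*npcol to equal the communicator size. */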
564: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
565: PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
566: PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
567: if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);
569: lu->MatInputMode = DISTRIBUTED;
571: PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);
572: if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;
574: PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
575: if (set && !flg) options.Equil = NO;
577: PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);
578: if (flg) {
579: switch (indx) {
580: case 0:
581: options.RowPerm = LargeDiag;
582: break;
583: case 1:
584: options.RowPerm = NOROWPERM;
585: break;
586: }
587: }
589: PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
590: if (flg) {
591: switch (indx) {
592: case 0:
593: options.ColPerm = NATURAL;
594: break;
595: case 1:
596: options.ColPerm = MMD_AT_PLUS_A;
597: break;
598: case 2:
599: options.ColPerm = MMD_ATA;
600: break;
601: case 3:
602: options.ColPerm = METIS_AT_PLUS_A;
603: break;
604: case 4:
605: options.ColPerm = PARMETIS; /* only works for np>1 */
606: break;
607: default:
608: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
609: }
610: }
612: options.ReplaceTinyPivot = NO;
613: PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
614: if (set && flg) options.ReplaceTinyPivot = YES;
616: options.ParSymbFact = NO;
617: PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
618: if (set && flg && size>1) {
619: if (lu->MatInputMode == GLOBAL) {
620: #if defined(PETSC_USE_INFO)
621: PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");
622: #endif
623: } else {
624: #if defined(PETSC_HAVE_PARMETIS)
625: options.ParSymbFact = YES;
626: options.ColPerm = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
627: #else
628: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
629: #endif
630: }
631: }
633: lu->FactPattern = SamePattern_SameRowPerm;
634: PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[0],&indx,&flg);
635: if (flg) {
636: switch (indx) {
637: case 0:
638: lu->FactPattern = SamePattern;
639: break;
640: case 1:
641: lu->FactPattern = SamePattern_SameRowPerm;
642: break;
643: }
644: }
646: options.IterRefine = NOREFINE;
647: PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);
648: if (set) {
649: if (flg) options.IterRefine = SLU_DOUBLE;
650: else options.IterRefine = NOREFINE;
651: }
653: if (PetscLogPrintInfo) options.PrintStat = YES;
654: else options.PrintStat = NO;
655: PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
656: PetscOptionsEnd();
658: lu->options = options;
659: lu->options.Fact = DOFACT;
660: lu->matsolve_iscalled = PETSC_FALSE;
661: lu->matmatsolve_iscalled = PETSC_FALSE;
663: *F = B;
664: return(0);
665: }
669: PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void)
670: {
673: MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ, MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
674: MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ, MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
675: return(0);
676: }
680: PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
681: {
682: Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)A->spptr;
683: superlu_dist_options_t options;
684: PetscErrorCode ierr;
687: /* check if matrix is superlu_dist type */
688: if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);
690: options = lu->options;
691: PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
692: PetscViewerASCIIPrintf(viewer," Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
693: PetscViewerASCIIPrintf(viewer," Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
694: PetscViewerASCIIPrintf(viewer," Matrix input mode %d \n",lu->MatInputMode);
695: PetscViewerASCIIPrintf(viewer," Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
696: PetscViewerASCIIPrintf(viewer," Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
697: PetscViewerASCIIPrintf(viewer," Processors in row %d col partition %d \n",lu->nprow,lu->npcol);
698: PetscViewerASCIIPrintf(viewer," Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");
700: switch (options.ColPerm) {
701: case NATURAL:
702: PetscViewerASCIIPrintf(viewer," Column permutation NATURAL\n");
703: break;
704: case MMD_AT_PLUS_A:
705: PetscViewerASCIIPrintf(viewer," Column permutation MMD_AT_PLUS_A\n");
706: break;
707: case MMD_ATA:
708: PetscViewerASCIIPrintf(viewer," Column permutation MMD_ATA\n");
709: break;
710: case METIS_AT_PLUS_A:
711: PetscViewerASCIIPrintf(viewer," Column permutation METIS_AT_PLUS_A\n");
712: break;
713: case PARMETIS:
714: PetscViewerASCIIPrintf(viewer," Column permutation PARMETIS\n");
715: break;
716: default:
717: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
718: }
720: PetscViewerASCIIPrintf(viewer," Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);
722: if (lu->FactPattern == SamePattern) {
723: PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern\n");
724: } else {
725: PetscViewerASCIIPrintf(viewer," Repeated factorization SamePattern_SameRowPerm\n");
726: }
727: return(0);
728: }
732: PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
733: {
734: PetscErrorCode ierr;
735: PetscBool iascii;
736: PetscViewerFormat format;
739: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
740: if (iascii) {
741: PetscViewerGetFormat(viewer,&format);
742: if (format == PETSC_VIEWER_ASCII_INFO) {
743: MatFactorInfo_SuperLU_DIST(A,viewer);
744: }
745: }
746: return(0);
747: }
750: /*MC
751: MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization
753: Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST
755: Use -pc_type lu -pc_factor_mat_solver_package superlu_dist to use this direct solver
757: Works with AIJ matrices
759: Options Database Keys:
760: + -mat_superlu_dist_r <n> - number of rows in processor partition
761: . -mat_superlu_dist_c <n> - number of columns in processor partition
762: . -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode (DISTRIBUTED is the default)
763: . -mat_superlu_dist_equil - equilibrate the matrix
764: . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
765: . -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
766: . -mat_superlu_dist_replacetinypivot - replace tiny pivots
767: . -mat_superlu_dist_fact <SamePattern,SamePattern_SameRowPerm> - sparsity pattern reuse for repeated factorizations
768: . -mat_superlu_dist_iterrefine - use iterative refinement
769: - -mat_superlu_dist_statprint - print factorization information
771: Level: beginner
773: .seealso: PCLU
775: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
777: M*/
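A minimal usage sketch (not part of superlu_dist.c): assuming a parallel AIJ matrix A and vectors b, x have already been assembled, the lines below select this package through the KSP/PC interface, and KSPSetFromOptions() then picks up the -mat_superlu_dist_* options listed above. Error checking (ierr = .../CHKERRQ) is omitted for brevity.

  KSP ksp;
  PC  pc;
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetOperators(ksp,A,A);
  KSPSetType(ksp,KSPPREONLY);                             /* one direct solve, no Krylov iterations */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCLU);
  PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST);  /* same as -pc_factor_mat_solver_package superlu_dist */
  KSPSetFromOptions(ksp);
  KSPSolve(ksp,b,x);
  KSPDestroy(&ksp);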