Source: superlu_dist.c (petsc-3.10.5, 2019-03-28)
/*
  Provides an interface to the SuperLU_DIST sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This works around a problem with SuperLU on Cray systems */
#include <stdlib.h>
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
  GLOBAL      - The sparse matrix and right-hand side are initially stored entirely on process 0 (a more accurate name would be "centralized").
  DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};

typedef struct {
  int_t                  nprow,npcol,*row,*col;
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SuperLU_MatInputMode   MatInputMode;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist; /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;

#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
#else
  PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
#endif
  return(0);
}

PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
{
  PetscErrorCode ierr;

  PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));
  return(0);
}
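
/*
  Illustrative usage sketch (not part of the original source; the names F and N
  below are assumptions): extract the diagonal of the U factor after a numeric
  factorization, e.g. with F obtained from PCFactorGetMatrix().

    PetscScalar *diagU;
    PetscInt    N;
    MatGetSize(F,&N,NULL);            // diagU must hold all N global diagonal entries of U
    PetscMalloc1(N,&diagU);
    MatSuperluDistGetDiagU(F,diagU);  // fills diagU via pdGetDiagU()/pzGetDiagU()
    ...
    PetscFree(diagU);
*/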

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options,&lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options,&lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N,&lu->grid,&lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    MPI_Comm_free(&(lu->comm_superlu));
  }
  PetscFree(A->data);
  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);
  return(0);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr=NULL;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);
    ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);
    VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);
    ISDestroy(&iden);

    VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&bptr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options,&lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options,&lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    VecCopy(b_mpi,x);
    VecGetArray(x,&bptr);
  }

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options,&stat,&lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    VecRestoreArray(x_seq,&bptr);
    VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterDestroy(&scat);
    VecDestroy(&x_seq);
  } else {
    VecRestoreArray(x,&bptr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  return(0);
}

static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was likely set up for a single
       right-hand side in MatSolve(), not for MatMatSolve(); destroy it and let a
       new SOLVEstruct be created, otherwise the solve may corrupt memory or
       produce an incorrect solution. See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options,&lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options,&lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);

  MatGetSize(B_mpi,NULL,&nrhs);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  MatDenseGetArray(X,&bptr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  MatDenseRestoreArray(X,&bptr);

  if (lu->options.PrintStat) PStatPrint(&lu->options,&stat,&lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  return(0);
}
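
/*
  Illustrative usage sketch (F, B and X are assumptions): both the right-hand-side
  and solution matrices must be dense, as checked above.

    Mat X;
    MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);  // B is MATSEQDENSE or MATMPIDENSE
    MatMatSolve(F,B,X);                         // solves A*X = B with the SuperLU_DIST factors
    MatDestroy(&X);
*/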

/*
  input:
    F     - numeric Cholesky factor
  output:
    nneg  - total number of negative pivots
    nzero - total number of zero pivots
    npos  - (global dimension of F) - nneg - nzero
*/
static PetscErrorCode MatGetInertia_SuperLU_DIST(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscScalar      *diagU=NULL;
  PetscInt         M,i,neg=0,zero=0,pos=0;
  PetscReal        r;

  if (!F->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix factor F is not assembled");
  if (lu->options.RowPerm != NOROWPERM) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set NOROWPERM");
  MatGetSize(F,&M,NULL);
  PetscMalloc1(M,&diagU);
  MatSuperluDistGetDiagU(F,diagU);
  for (i=0; i<M; i++) {
#if defined(PETSC_USE_COMPLEX)
    r = PetscImaginaryPart(diagU[i])/10.0;
    if (r < -PETSC_MACHINE_EPSILON || r > PETSC_MACHINE_EPSILON) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"diagU[%d]=%g + i %g is non-real",i,PetscRealPart(diagU[i]),r*10.0);
    r = PetscRealPart(diagU[i]);
#else
    r = diagU[i];
#endif
    if (r > 0) {
      pos++;
    } else if (r < 0) {
      neg++;
    } else zero++;
  }

  PetscFree(diagU);
  if (nneg)  *nneg  = neg;
  if (nzero) *nzero = zero;
  if (npos)  *npos  = pos;
  return(0);
}
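
/*
  Illustrative usage sketch: inertia is typically queried in shift-and-invert
  spectrum slicing. The factorization must have been computed with
  -mat_superlu_dist_rowperm NOROWPERM, as enforced above.

    PetscInt nneg,nzero,npos;
    MatGetInertia(F,&nneg,&nzero,&npos);  // F is the factored matrix
*/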

static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj=NULL;
  int              sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av,*bv;
#else
  double           *av,*bv;
#endif

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
      MatCreateSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
      ISDestroy(&isrow);

      A_seq = *tseq;
      PetscFree(tseq);
      aa = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
       Note: memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N,&lu->grid,&lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup,M,N,aa->nz,lu->val,lu->col,lu->row,SLU_NC,SLU_Z,SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup,M,N,aa->nz,lu->val,lu->col,lu->row,SLU_NC,SLU_D,SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m,nz,&lu->val,&lu->col,&lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m,nz,&lu->val,&lu->col,&lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else if (lu->FactPattern == SamePattern) {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N,&lu->grid,&lu->LUstruct)); /* Deallocate L and U matrices. */
        lu->options.Fact = SamePattern;
      } else if (lu->FactPattern == DOFACT) {
        PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N,&lu->grid,&lu->LUstruct));
        lu->options.Fact = DOFACT;

#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m,nz,&lu->val,&lu->col,&lu->row));
#else
        PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m,nz,&lu->val,&lu->col,&lu->row));
#endif
      } else {
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"options.Fact must be one of SamePattern SamePattern_SameRowPerm DOFACT");
      }
    }
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      if (aj) {
        ajj = aj + ai[i]; /* ptr to the beginning of this row */
      } else {
        ajj = NULL;
      }
      bjj = bj + bi[i];

      /* B part, smaller col index */
      if (aj) {
        colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      } else { /* superlu_dist does not require that the matrix have diagonal entries, so aj=NULL works */
        colA_start = rstart;
      }
      jB = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
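
    /* Worked example (illustrative): if on this process rstart=4, the diagonal
       block A has a row with entries at global columns {4,6}, and the
       off-diagonal block B has entries at garray-mapped global columns {1,9},
       then colA_start=4 and the loops above emit columns in the order
       1 (B part, smaller), 4, 6 (A part), 9 (B part, larger), so the local CSR
       row ends up with ascending global column indices. */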

    if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup,M,N,nz,m,rstart,lu->val,lu->col,lu->row,SLU_NR_loc,SLU_Z,SLU_GE));
#else
      PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup,M,N,nz,m,rstart,lu->val,lu->col,lu->row,SLU_NR_loc,SLU_D,SLU_GE));
#endif
    }
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,M,0,&lu->grid,&lu->LUstruct,berr,&stat,&sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,M,0,&lu->grid,&lu->LUstruct,berr,&stat,&sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,m,0,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,m,0,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&sinfo));
#endif
  }

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);
      } else if (sinfo > lu->A_sup.ncol) {
        /*
          sinfo is the number of bytes allocated when the memory allocation
          failure occurred, plus A->ncol.
        */
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
        PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"info = %D, argument in p*gssvx() had an illegal value",sinfo);
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    MatDestroy(&A_seq);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options,&stat,&lu->grid); /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  F->assembled     = PETSC_TRUE;
  F->preallocated  = PETSC_TRUE;
  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this function only. */
  return(0);
}

/* Note: the PETSc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscInt         M=A->rmap->N,N=A->cmap->N;

  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu,lu->nprow,lu->npcol,&lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M,N,&lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N,&lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  F->ops->getinertia      = NULL;

  if (A->symmetric || A->hermitian) {
    F->ops->getinertia = MatGetInertia_SuperLU_DIST;
  }
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  if (!A->symmetric) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Input matrix must be symmetric\n");
  MatLUFactorSymbolic_SuperLU_DIST(F,A,r,r,info);
  F->ops->choleskyfactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  return(0);
}

static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A,MatSolverType *type)
{
  *type = MATSOLVERSUPERLU_DIST;
  return(0);
}

static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu = (Mat_SuperLU_DIST*)A->data;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);

  options = lu->options;
  PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
  PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
  PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);
  PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
  PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
  PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);

  switch (options.RowPerm) {
  case NOROWPERM:
    PetscViewerASCIIPrintf(viewer,"  Row permutation NOROWPERM\n");
    break;
  case LargeDiag_MC64:
    PetscViewerASCIIPrintf(viewer,"  Row permutation LargeDiag_MC64\n");
    break;
  case LargeDiag_AWPM:
    PetscViewerASCIIPrintf(viewer,"  Row permutation LargeDiag_AWPM\n");
    break;
  case MY_PERMR:
    PetscViewerASCIIPrintf(viewer,"  Row permutation MY_PERMR\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
  }

  switch (options.ColPerm) {
  case NATURAL:
    PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");
    break;
  case MMD_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");
    break;
  case MMD_ATA:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");
    break;
  case METIS_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");
    break;
  case PARMETIS:
    PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

  if (lu->FactPattern == SamePattern) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");
  } else if (lu->FactPattern == DOFACT) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization DOFACT\n");
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
  }
  return(0);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      MatView_Info_SuperLU_DIST(A,viewer);
    }
  }
  return(0);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"NOROWPERM","LargeDiag_MC64","LargeDiag_AWPM","MY_PERMR"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
  PetscBool              set;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
  PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);
  MatSetUp(B);
  B->ops->getinfo = MatGetInfo_External;
  B->ops->view    = MatView_SuperLU_DIST;
  B->ops->destroy = MatDestroy_SuperLU_DIST;

  if (ftype == MAT_FACTOR_LU) {
    B->factortype            = MAT_FACTOR_LU;
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  } else {
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SuperLU_DIST;
  }

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);

  PetscNewLog(B,&lu);
  B->data = lu;

  /* Set the default input options:
       options.Fact              = DOFACT;
       options.Equil             = YES;
       options.ParSymbFact       = NO;
       options.ColPerm           = METIS_AT_PLUS_A;
       options.RowPerm           = LargeDiag_MC64;
       options.ReplaceTinyPivot  = YES;
       options.IterRefine        = DOUBLE;
       options.Trans             = NOTRANS;
       options.SolveInitialized  = NO; -- holds the communication pattern used by MatSolve() and MatMatSolve()
       options.RefineInitialized = NO;
       options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  /* Default number of process rows and columns */
  lu->nprow = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->nprow) lu->nprow = 1;
  while (lu->nprow > 0) {
    lu->npcol = (int_t) (size/lu->nprow);
    if (size == lu->nprow * lu->npcol) break;
    lu->nprow--;
  }
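  /* Example (illustrative): size=6 gives nprow=(int_t)(0.5+PetscSqrtReal(6.0))=2
     and npcol=3 (2*3==6), i.e. a 2x3 default grid; for a prime size such as 7
     the loop decrements nprow until it reaches 1, yielding a 1x7 grid. */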

  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
  PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
  PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && !flg) options.Equil = NO;

  PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,4,rowperm[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = NOROWPERM;
      break;
    case 1:
      options.RowPerm = LargeDiag_MC64;
      break;
    case 2:
      options.RowPerm = LargeDiag_AWPM;
      break;
    case 3:
      options.RowPerm = MY_PERMR;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
    }
  }

  PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
  if (set && flg && size>1) {
    if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_INFO)
      PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");
#endif
    } else {
#if defined(PETSC_HAVE_PARMETIS)
      options.ParSymbFact = YES;
      options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
    }
  }

  lu->FactPattern = SamePattern;
  PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE,&flg,&set);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else     options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else                   options.PrintStat = NO;
  PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_superlu_dist);
  PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
{
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
  return(0);
}
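
/*
  Illustrative end-to-end sketch of reaching this solver through the Mat factor
  API (A, b, x are assumptions; error checking omitted; the r,c orderings are
  ignored by this interface, see the note above MatLUFactorSymbolic_SuperLU_DIST()):

    Mat           F;
    MatFactorInfo info;
    MatFactorInfoInitialize(&info);
    MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);
    MatLUFactorSymbolic(F,A,NULL,NULL,&info);
    MatLUFactorNumeric(F,A,&info);
    MatSolve(F,b,x);
    MatDestroy(&F);
*/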

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_type superlu_dist to use this direct solver

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <NOROWPERM,LargeDiag_MC64,LargeDiag_AWPM,MY_PERMR> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm DOFACT
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information
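
  Example usage (illustrative; ./app stands for any PETSc application):
    mpiexec -n 4 ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist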

  Level: beginner

.seealso: PCLU, PCFactorSetMatSolverType(), MatSolverType

M*/