Actual source code: superlu_dist.c

petsc-3.9.4 2018-09-11

  2: /*
  3:         Provides an interface to the SuperLU_DIST sparse solver
  4: */

  6: #include <../src/mat/impls/aij/seq/aij.h>
  7: #include <../src/mat/impls/aij/mpi/mpiaij.h>
  8: #if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */
  9: #include <stdlib.h>
 10: #endif

 12: EXTERN_C_BEGIN
 13: #if defined(PETSC_USE_COMPLEX)
 14: #include <superlu_zdefs.h>
 15: #else
 16: #include <superlu_ddefs.h>
 17: #endif
 18: EXTERN_C_END

 20: /*
 21:     GLOBAL - The sparse matrix and right hand side are initially stored entirely on process 0 (a centralized input mode).
 22:     DISTRIBUTED - The sparse matrix and right hand side are initially distributed across the entire MPI communicator.
 23: */
 24: typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
 25: const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
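/* Usage note (editor's sketch, not part of the original source): the input mode is selected at
   runtime via the options database, e.g.
       -mat_superlu_dist_matinput DISTRIBUTED
   see MatGetFactor_aij_superlu_dist() below, where DISTRIBUTED is the default and GLOBAL is
   substituted automatically when the communicator has a single process. */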

 27: typedef struct {
 28:   int_t                  nprow,npcol,*row,*col;
 29:   gridinfo_t             grid;
 30:   superlu_dist_options_t options;
 31:   SuperMatrix            A_sup;
 32:   ScalePermstruct_t      ScalePermstruct;
 33:   LUstruct_t             LUstruct;
 34:   int                    StatPrint;
 35:   SuperLU_MatInputMode   MatInputMode;
 36:   SOLVEstruct_t          SOLVEstruct;
 37:   fact_t                 FactPattern;
 38:   MPI_Comm               comm_superlu;
 39: #if defined(PETSC_USE_COMPLEX)
 40:   doublecomplex          *val;
 41: #else
 42:   double                 *val;
 43: #endif
 44:   PetscBool              matsolve_iscalled,matmatsolve_iscalled;
 45:   PetscBool              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
 46: } Mat_SuperLU_DIST;


 49: PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
 50: {
 51:   Mat_SuperLU_DIST  *lu= (Mat_SuperLU_DIST*)F->data;

 54: #if defined(PETSC_USE_COMPLEX)
 55:   PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
 56: #else
 57:   PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
 58: #endif
 59:   return(0);
 60: }

 62: PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
 63: {
 64:   PetscErrorCode    ierr;

 68:   PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));
 69:   return(0);
 70: }
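/* Usage sketch (an assumption, mirroring MatGetInertia_SuperLU_DIST() below): after the numeric
   factorization, the diagonal of U can be retrieved with

       PetscScalar *diagU;
       PetscInt    M;
       MatGetSize(F,&M,NULL);
       PetscMalloc1(M,&diagU);
       MatSuperluDistGetDiagU(F,diagU);
       ...
       PetscFree(diagU);

   where F is the factored matrix returned by MatGetFactor() or PCFactorGetMatrix(). */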

 72: static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
 73: {
 74:   PetscErrorCode   ierr;
 75:   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

 78:   if (lu->CleanUpSuperLU_Dist) {
 79:     /* Deallocate SuperLU_DIST storage */
 80:     if (lu->MatInputMode == GLOBAL) {
 81:       PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
 82:     } else {
 83:       PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
 84:       if (lu->options.SolveInitialized) {
 85: #if defined(PETSC_USE_COMPLEX)
 86:         PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
 87: #else
 88:         PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
 89: #endif
 90:       }
 91:     }
 92:     PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
 93:     PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
 94:     PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

 96:     /* Release the SuperLU_DIST process grid. */
 97:     PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
 98:     MPI_Comm_free(&(lu->comm_superlu));
 99:   }
100:   PetscFree(A->data);
101:   /* clear composed functions */
102:   PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
103:   PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);

105:   return(0);
106: }

108: static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
109: {
110:   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
111:   PetscErrorCode   ierr;
112:   PetscMPIInt      size;
113:   PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
114:   SuperLUStat_t    stat;
115:   double           berr[1];
116:   PetscScalar      *bptr=NULL;
117:   PetscInt         nrhs=1;
118:   Vec              x_seq;
119:   IS               iden;
120:   VecScatter       scat;
121:   int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
122:   static PetscBool cite = PETSC_FALSE;

125:   if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
126:   PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);

128:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
129:   if (size > 1 && lu->MatInputMode == GLOBAL) {
130:     /* global mat input, convert b to x_seq */
131:     VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);
132:     ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);
133:     VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);
134:     ISDestroy(&iden);

136:     VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
137:     VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
138:     VecGetArray(x_seq,&bptr);
139:   } else { /* size==1 || distributed mat input */
140:     if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
141:       /* see comments in MatMatSolve() */
142: #if defined(PETSC_USE_COMPLEX)
143:       PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
144: #else
145:       PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
146: #endif
147:       lu->options.SolveInitialized = NO;
148:     }
149:     VecCopy(b_mpi,x);
150:     VecGetArray(x,&bptr);
151:   }

153:   PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
154:   if (lu->MatInputMode == GLOBAL) {
155: #if defined(PETSC_USE_COMPLEX)
156:     PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
157: #else
158:     PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
159: #endif
160:   } else { /* distributed mat input */
161: #if defined(PETSC_USE_COMPLEX)
162:     PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
163: #else
164:     PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
165: #endif
166:   }
167:   if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

169:   if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
170:   PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

172:   if (size > 1 && lu->MatInputMode == GLOBAL) {
173:     /* convert seq x to mpi x */
174:     VecRestoreArray(x_seq,&bptr);
175:     VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
176:     VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
177:     VecScatterDestroy(&scat);
178:     VecDestroy(&x_seq);
179:   } else {
180:     VecRestoreArray(x,&bptr);

182:     lu->matsolve_iscalled    = PETSC_TRUE;
183:     lu->matmatsolve_iscalled = PETSC_FALSE;
184:   }
185:   return(0);
186: }

188: static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
189: {
190:   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
191:   PetscErrorCode   ierr;
192:   PetscMPIInt      size;
193:   PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
194:   SuperLUStat_t    stat;
195:   double           berr[1];
196:   PetscScalar      *bptr;
197:   int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
198:   PetscBool        flg;

201:   if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
202:   PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
203:   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
204:   PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
205:   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

207:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
208:   if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
209:   /* size==1 or distributed mat input */
210:   if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
211:     /* The communication pattern in SOLVEstruct was most likely not created for a MatMatSolve,
212:        so destroy it and create a new SOLVEstruct.
213:        Otherwise memory corruption or an incorrect solution may result.
214:        See src/mat/examples/tests/ex125.c */
215: #if defined(PETSC_USE_COMPLEX)
216:     PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
217: #else
218:     PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
219: #endif
220:     lu->options.SolveInitialized = NO;
221:   }
222:   MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);

224:   MatGetSize(B_mpi,NULL,&nrhs);

226:   PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
227:   MatDenseGetArray(X,&bptr);
228:   if (lu->MatInputMode == GLOBAL) { /* size == 1 */
229: #if defined(PETSC_USE_COMPLEX)
230:     PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
231: #else
232:     PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
233: #endif
234:   } else { /* distributed mat input */
235: #if defined(PETSC_USE_COMPLEX)
236:     PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
237: #else
238:     PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
239: #endif
240:   }
241:   if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
242:   MatDenseRestoreArray(X,&bptr);

244:   if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
245:   PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
246:   lu->matsolve_iscalled    = PETSC_FALSE;
247:   lu->matmatsolve_iscalled = PETSC_TRUE;
248:   return(0);
249: }
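/* Usage sketch (an assumption, not part of the original source): MatMatSolve() expects dense
   right-hand-side and solution matrices, e.g.

       Mat B,X;
       MatCreateDense(PetscObjectComm((PetscObject)F),PETSC_DECIDE,PETSC_DECIDE,M,nrhs,NULL,&B);
       ... fill B with right-hand sides ...
       MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);
       MatMatSolve(F,B,X);

   See src/mat/examples/tests/ex125.c, referenced in the comment above. */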

251: /*
252:   input:
253:    F:        numeric Cholesky factor
254:   output:
255:    nneg:     total number of negative pivots
256:    nzero:    total number of zero pivots
257:    npos:     (global dimension of F) - nneg - nzero
258: */
259: static PetscErrorCode MatGetInertia_SuperLU_DIST(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
260: {
261:   PetscErrorCode   ierr;
262:   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
263:   PetscScalar      *diagU=NULL;
264:   PetscInt         M,i,neg=0,zero=0,pos=0;
265:   PetscReal        r;

268:   if (!F->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix factor F is not assembled");
269:   if (lu->options.RowPerm != NOROWPERM) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set NOROWPERM");
270:   MatGetSize(F,&M,NULL);
271:   PetscMalloc1(M,&diagU);
272:   MatSuperluDistGetDiagU(F,diagU);
273:   for (i=0; i<M; i++) {
274: #if defined(PETSC_USE_COMPLEX)
275:     r = PetscImaginaryPart(diagU[i])/10.0;
276:     if (r< -PETSC_MACHINE_EPSILON || r>PETSC_MACHINE_EPSILON) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"diagU[%d]=%g + i %g is non-real",i,PetscRealPart(diagU[i]),r*10.0);
277:     r = PetscRealPart(diagU[i]);
278: #else
279:     r = diagU[i];
280: #endif
281:     if (r > 0) {
282:       pos++;
283:     } else if (r < 0) {
284:       neg++;
285:     } else zero++;
286:   }

288:   PetscFree(diagU);
289:   if (nneg)  *nneg  = neg;
290:   if (nzero) *nzero = zero;
291:   if (npos)  *npos  = pos;
292:   return(0);
293: }
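/* Usage sketch (an assumption): inertia is only available when the factorization keeps the
   natural row ordering, e.g. run with -mat_superlu_dist_rowperm NATURAL (so RowPerm == NOROWPERM)
   and then, for a symmetric/Hermitian factored matrix F,

       PetscInt nneg,nzero,npos;
       MatGetInertia(F,&nneg,&nzero,&npos);
*/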

295: static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
296: {
297:   Mat              *tseq,A_seq = NULL;
298:   Mat_SeqAIJ       *aa,*bb;
299:   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
300:   PetscErrorCode   ierr;
301:   PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
302:                    m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj=NULL;
303:   int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
304:   PetscMPIInt      size;
305:   SuperLUStat_t    stat;
306:   double           *berr=0;
307:   IS               isrow;
308: #if defined(PETSC_USE_COMPLEX)
309:   doublecomplex    *av, *bv;
310: #else
311:   double           *av, *bv;
312: #endif

315:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);

317:   if (lu->MatInputMode == GLOBAL) { /* global mat input */
318:     if (size > 1) { /* convert mpi A to seq mat A */
319:       ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
320:       MatCreateSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
321:       ISDestroy(&isrow);

323:       A_seq = *tseq;
324:       PetscFree(tseq);
325:       aa    = (Mat_SeqAIJ*)A_seq->data;
326:     } else {
327:       PetscBool flg;
328:       PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
329:       if (flg) {
330:         Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
331:         A = At->A;
332:       }
333:       aa =  (Mat_SeqAIJ*)A->data;
334:     }

336:     /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
337:        Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
338:     if (lu->options.Fact != DOFACT) {/* successive numeric factorization, sparsity pattern is reused. */
339:       PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
340:       if (lu->FactPattern == SamePattern_SameRowPerm) {
341:         lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
342:       } else { /* lu->FactPattern == SamePattern */
343:         PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
344:         lu->options.Fact = SamePattern;
345:       }
346:     }
347: #if defined(PETSC_USE_COMPLEX)
348:     PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row));
349: #else
350:     PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row));
351: #endif

353:     /* Create compressed column matrix A_sup. */
354: #if defined(PETSC_USE_COMPLEX)
355:     PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
356: #else
357:     PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
358: #endif
359:   } else { /* distributed mat input */
360:     Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
361:     aa=(Mat_SeqAIJ*)(mat->A)->data;
362:     bb=(Mat_SeqAIJ*)(mat->B)->data;
363:     ai=aa->i; aj=aa->j;
364:     bi=bb->i; bj=bb->j;
365: #if defined(PETSC_USE_COMPLEX)
366:     av=(doublecomplex*)aa->a;
367:     bv=(doublecomplex*)bb->a;
368: #else
369:     av=aa->a;
370:     bv=bb->a;
371: #endif
372:     rstart = A->rmap->rstart;
373:     nz     = aa->nz + bb->nz;
374:     garray = mat->garray;

376:     if (lu->options.Fact == DOFACT) { /* first numeric factorization */
377: #if defined(PETSC_USE_COMPLEX)
378:       PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
379: #else
380:       PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
381: #endif
382:     } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
383:       if (lu->FactPattern == SamePattern_SameRowPerm) {
384:         lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
385:       } else if (lu->FactPattern == SamePattern) {
386:         PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate L and U matrices. */
387:         lu->options.Fact = SamePattern;
388:       } else if (lu->FactPattern == DOFACT) {
389:         PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
390:         PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
391:         lu->options.Fact = DOFACT;

393: #if defined(PETSC_USE_COMPLEX)
394:       PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
395: #else
396:       PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
397: #endif
398:       } else {
399:         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"options.Fact must be one of SamePattern SamePattern_SameRowPerm DOFACT");
400:       }
401:     }
402:     nz = 0;
403:     for (i=0; i<m; i++) {
404:       lu->row[i] = nz;
405:       countA     = ai[i+1] - ai[i];
406:       countB     = bi[i+1] - bi[i];
407:       if (aj) {
408:         ajj = aj + ai[i]; /* ptr to the beginning of this row */
409:       } else {
410:         ajj = NULL;
411:       }
412:       bjj = bj + bi[i];

414:       /* B part, smaller col index */
415:       if (aj) {
416:         colA_start = rstart + ajj[0]; /* the smallest global col index of A */
417:       } else { /* superlu_dist does not require the matrix to have diagonal entries, thus aj=NULL works */
418:         colA_start = rstart;
419:       }
420:       jB         = 0;
421:       for (j=0; j<countB; j++) {
422:         jcol = garray[bjj[j]];
423:         if (jcol > colA_start) {
424:           jB = j;
425:           break;
426:         }
427:         lu->col[nz]   = jcol;
428:         lu->val[nz++] = *bv++;
429:         if (j==countB-1) jB = countB;
430:       }

432:       /* A part */
433:       for (j=0; j<countA; j++) {
434:         lu->col[nz]   = rstart + ajj[j];
435:         lu->val[nz++] = *av++;
436:       }

438:       /* B part, larger col index */
439:       for (j=jB; j<countB; j++) {
440:         lu->col[nz]   = garray[bjj[j]];
441:         lu->val[nz++] = *bv++;
442:       }
443:     }
444:     lu->row[m] = nz;

446:     if (lu->options.Fact == DOFACT) {
447: #if defined(PETSC_USE_COMPLEX)
448:       PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
449: #else
450:       PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
451: #endif
452:     }
453:   }

455:   /* Factor the matrix. */
456:   PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
457:   if (lu->MatInputMode == GLOBAL) { /* global mat input */
458: #if defined(PETSC_USE_COMPLEX)
459:     PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
460: #else
461:     PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
462: #endif
463:   } else { /* distributed mat input */
464: #if defined(PETSC_USE_COMPLEX)
465:     PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
466: #else
467:     PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
468: #endif
469:   }
470: 
471:   if (sinfo > 0) {
472:     if (A->erroriffailure) {
473:       SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
474:     } else {
475:       if (sinfo <= lu->A_sup.ncol) {
476:         F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
477:         PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);
478:       } else if (sinfo > lu->A_sup.ncol) {
479:         /* 
480:          number of bytes allocated when memory allocation
481:          failure occurred, plus A->ncol.
482:          */
483:         F->factorerrortype = MAT_FACTOR_OUTMEMORY;
484:         PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);
485:       }
486:     }
487:   } else if (sinfo < 0) {
488:     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
489:   }

491:   if (lu->MatInputMode == GLOBAL && size > 1) {
492:     MatDestroy(&A_seq);
493:   }

495:   if (lu->options.PrintStat) {
496:     PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
497:   }
498:   PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
499:   F->assembled    = PETSC_TRUE;
500:   F->preallocated = PETSC_TRUE;
501:   lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
502:   return(0);
503: }

505: /* Note the Petsc r and c permutations are ignored */
506: static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
507: {
508:   Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
509:   PetscInt         M   = A->rmap->N,N=A->cmap->N;

512:   /* Initialize the SuperLU process grid. */
513:   PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

515:   /* Initialize ScalePermstruct and LUstruct. */
516:   PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
517:   PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
518:   F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
519:   F->ops->solve           = MatSolve_SuperLU_DIST;
520:   F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
521:   F->ops->getinertia      = NULL;

523:   if (A->symmetric || A->hermitian) {
524:     F->ops->getinertia = MatGetInertia_SuperLU_DIST;
525:   }
526:   lu->CleanUpSuperLU_Dist = PETSC_TRUE;
527:   return(0);
528: }

530: static PetscErrorCode MatCholeskyFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,const MatFactorInfo *info)
531: {

535:   if (!A->symmetric) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Input matrix must be symmetric\n");
536:   MatLUFactorSymbolic_SuperLU_DIST(F,A,r,r,info);
537:   F->ops->choleskyfactornumeric = MatLUFactorNumeric_SuperLU_DIST;
538:   return(0);
539: }

541: static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A,MatSolverType *type)
542: {
544:   *type = MATSOLVERSUPERLU_DIST;
545:   return(0);
546: }

548: static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A,PetscViewer viewer)
549: {
550:   Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->data;
551:   superlu_dist_options_t options;
552:   PetscErrorCode         ierr;

555:   /* check if matrix is superlu_dist type */
556:   if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);

558:   options = lu->options;
559:   PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
560:   PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
561:   PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
562:   PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);
563:   PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
564:   PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
565:   PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);
566:   PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");

568:   switch (options.ColPerm) {
569:   case NATURAL:
570:     PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");
571:     break;
572:   case MMD_AT_PLUS_A:
573:     PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");
574:     break;
575:   case MMD_ATA:
576:     PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");
577:     break;
578:   case METIS_AT_PLUS_A:
579:     PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");
580:     break;
581:   case PARMETIS:
582:     PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");
583:     break;
584:   default:
585:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
586:   }

588:   PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

590:   if (lu->FactPattern == SamePattern) {
591:     PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");
592:   } else if (lu->FactPattern == SamePattern_SameRowPerm) {
593:     PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");
594:   } else if (lu->FactPattern == DOFACT) {
595:     PetscViewerASCIIPrintf(viewer,"  Repeated factorization DOFACT\n");
596:   } else {
597:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
598:   }
599:   return(0);
600: }

602: static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
603: {
604:   PetscErrorCode    ierr;
605:   PetscBool         iascii;
606:   PetscViewerFormat format;

609:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
610:   if (iascii) {
611:     PetscViewerGetFormat(viewer,&format);
612:     if (format == PETSC_VIEWER_ASCII_INFO) {
613:       MatView_Info_SuperLU_DIST(A,viewer);
614:     }
615:   }
616:   return(0);
617: }

619: static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
620: {
621:   Mat                    B;
622:   Mat_SuperLU_DIST       *lu;
623:   PetscErrorCode         ierr;
624:   PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
625:   PetscMPIInt            size;
626:   superlu_dist_options_t options;
627:   PetscBool              flg;
628:   const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
629:   const char             *rowperm[]     = {"LargeDiag","NATURAL"};
630:   const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
631:   PetscBool              set;

634:   /* Create the factorization matrix */
635:   MatCreate(PetscObjectComm((PetscObject)A),&B);
636:   MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
637:   PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);
638:   MatSetUp(B);
639:   B->ops->getinfo = MatGetInfo_External;
640:   B->ops->view    = MatView_SuperLU_DIST;
641:   B->ops->destroy = MatDestroy_SuperLU_DIST;

643:   if (ftype == MAT_FACTOR_LU) {
644:     B->factortype = MAT_FACTOR_LU;
645:     B->ops->lufactorsymbolic       = MatLUFactorSymbolic_SuperLU_DIST;
646:   } else {
647:     B->factortype = MAT_FACTOR_CHOLESKY;
648:     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SuperLU_DIST;
649:   }

651:   /* set solvertype */
652:   PetscFree(B->solvertype);
653:   PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);

655:   PetscNewLog(B,&lu);
656:   B->data = lu;

658:   /* Set the default input options:
659:      options.Fact              = DOFACT;
660:      options.Equil             = YES;
661:      options.ParSymbFact       = NO;
662:      options.ColPerm           = METIS_AT_PLUS_A;
663:      options.RowPerm           = LargeDiag;
664:      options.ReplaceTinyPivot  = YES;
665:      options.IterRefine        = DOUBLE;
666:      options.Trans             = NOTRANS;
667:      options.SolveInitialized  = NO; - holds the communication pattern used by MatSolve() and MatMatSolve()
668:      options.RefineInitialized = NO;
669:      options.PrintStat         = YES;
670:   */
671:   set_default_options_dist(&options);

673:   MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
674:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
675:   /* Default num of process columns and rows */
676:   lu->nprow = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
677:   if (!lu->nprow) lu->nprow = 1;
678:   while (lu->nprow > 0) {
679:     lu->npcol = (int_t) (size/lu->nprow);
680:     if (size == lu->nprow * lu->npcol) break;
681:     lu->nprow--;
682:   }

684:   PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
685:   PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
686:   PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
687:   if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal to nprow %d * npcol %d",size,lu->nprow,lu->npcol);

689:   lu->MatInputMode = DISTRIBUTED;

691:   PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);
692:   if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

694:   PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
695:   if (set && !flg) options.Equil = NO;

697:   PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);
698:   if (flg) {
699:     switch (indx) {
700:     case 0:
701:       options.RowPerm = LargeDiag;
702:       break;
703:     case 1:
704:       options.RowPerm = NOROWPERM;
705:       break;
706:     }
707:   }

709:   PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
710:   if (flg) {
711:     switch (indx) {
712:     case 0:
713:       options.ColPerm = NATURAL;
714:       break;
715:     case 1:
716:       options.ColPerm = MMD_AT_PLUS_A;
717:       break;
718:     case 2:
719:       options.ColPerm = MMD_ATA;
720:       break;
721:     case 3:
722:       options.ColPerm = METIS_AT_PLUS_A;
723:       break;
724:     case 4:
725:       options.ColPerm = PARMETIS;   /* only works for np>1 */
726:       break;
727:     default:
728:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
729:     }
730:   }

732:   options.ReplaceTinyPivot = NO;
733:   PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
734:   if (set && flg) options.ReplaceTinyPivot = YES;

736:   options.ParSymbFact = NO;
737:   PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
738:   if (set && flg && size>1) {
739:     if (lu->MatInputMode == GLOBAL) {
740: #if defined(PETSC_USE_INFO)
741:       PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");
742: #endif
743:     } else {
744: #if defined(PETSC_HAVE_PARMETIS)
745:       options.ParSymbFact = YES;
746:       options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
747: #else
748:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
749: #endif
750:     }
751:   }

753:   lu->FactPattern = SamePattern;
754:   PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);
755:   if (flg) {
756:     switch (indx) {
757:     case 0:
758:       lu->FactPattern = SamePattern;
759:       break;
760:     case 1:
761:       lu->FactPattern = SamePattern_SameRowPerm;
762:       break;
763:     case 2:
764:       lu->FactPattern = DOFACT;
765:       break;
766:     }
767:   }

769:   options.IterRefine = NOREFINE;
770:   PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);
771:   if (set) {
772:     if (flg) options.IterRefine = SLU_DOUBLE;
773:     else options.IterRefine = NOREFINE;
774:   }

776:   if (PetscLogPrintInfo) options.PrintStat = YES;
777:   else options.PrintStat = NO;
778:   PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
779:   PetscOptionsEnd();

781:   lu->options              = options;
782:   lu->options.Fact         = DOFACT;
783:   lu->matsolve_iscalled    = PETSC_FALSE;
784:   lu->matmatsolve_iscalled = PETSC_FALSE;

786:   PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_superlu_dist);
787:   PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);

789:   *F = B;
790:   return(0);
791: }

793: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
794: {
797:   MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,  MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
798:   MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,  MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
799:   MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,  MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
800:   MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,  MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
801:   return(0);
802: }

804: /*MC
805:   MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

807:   Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch  to have PETSc installed with SuperLU_DIST

809:   Use -pc_type lu -pc_factor_mat_solver_type superlu_dist to use this direct solver

811:    Works with AIJ matrices

813:   Options Database Keys:
814: + -mat_superlu_dist_r <n> - number of rows in processor partition
815: . -mat_superlu_dist_c <n> - number of columns in processor partition
816: . -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode (global or distributed)
817: . -mat_superlu_dist_equil - equilibrate the matrix
818: . -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
819: . -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
820: . -mat_superlu_dist_replacetinypivot - replace tiny pivots
821: . -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm DOFACT
822: . -mat_superlu_dist_iterrefine - use iterative refinement
823: - -mat_superlu_dist_statprint - print factorization information

825:    Level: beginner

827: .seealso: PCLU, PCFactorSetMatSolverType(), MatSolverType

831: M*/
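
/*
   Example usage (editor's sketch, not part of the original source; the application name and
   mesh-independent options are placeholders):

       mpiexec -n 4 ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist \
                          -mat_superlu_dist_colperm PARMETIS -mat_superlu_dist_statprint

   or, programmatically, assuming a KSP/PC has already been created:

       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCLU);
       PCFactorSetMatSolverType(pc,MATSOLVERSUPERLU_DIST);
*/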