Actual source code: superlu_dist.c

petsc-3.7.3 2016-08-01
/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on Cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist macro telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL      - The sparse matrix and right-hand side are initially stored entirely on process 0; a more accurate name would be CENTRALIZED
    DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
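
/*
   The input mode is chosen at runtime from the options database (see
   MatGetFactor_aij_superlu_dist() below).  A minimal sketch of selecting the
   distributed mode on the command line (the application name is a placeholder):

       mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_package superlu_dist \
               -mat_superlu_dist_matinput DISTRIBUTED
*/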

typedef struct {
  int_t                  nprow,npcol,*row,*col;
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SuperLU_MatInputMode   MatInputMode;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);
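
/*
   Rough lifecycle of these routines when driven through PCLU (a sketch based
   on the functions in this file, not a normative call graph):
   MatGetFactor_aij_superlu_dist() creates the factor matrix and fills in
   lu->options; MatLUFactorSymbolic_SuperLU_DIST() starts the process grid and
   initializes ScalePermstruct/LUstruct; MatLUFactorNumeric_SuperLU_DIST()
   converts the AIJ matrix and calls p?gssvx to factor; MatSolve_SuperLU_DIST()
   and MatMatSolve_SuperLU_DIST() reuse the factors; MatDestroy_SuperLU_DIST()
   releases all SuperLU_DIST objects.
*/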

PetscErrorCode MatGetDiagonal_SuperLU_DIST(Mat A,Vec v)
{
  SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: SuperLU_DIST factor");
  return(0);
}

PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    MPI_Comm_free(&(lu->comm_superlu));
  }
  PetscFree(A->spptr);

  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
  if (flg) {
    MatDestroy_SeqAIJ(A);
  } else {
    MatDestroy_MPIAIJ(A);
  }
  return(0);
}

PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);
    ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);
    VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);
    ISDestroy(&iden);

    VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&bptr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    VecCopy(b_mpi,x);
    VecGetArray(x,&bptr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    VecRestoreArray(x_seq,&bptr);
    VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterDestroy(&scat);
    VecDestroy(&x_seq);
  } else {
    VecRestoreArray(x,&bptr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  return(0);
}

PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was unlikely to have been created for MatMatSolve(),
       thus destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);

  MatGetSize(B_mpi,NULL,&nrhs);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  MatDenseGetArray(X,&bptr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);
  MatDenseRestoreArray(X,&bptr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  return(0);
}


PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
      MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
      ISDestroy(&isrow);

      A_seq = *tseq;
      PetscFree(tseq);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc NR (compressed row) matrix to SuperLU_DIST NC (compressed column) format.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;
    bb=(Mat_SeqAIJ*)(mat->B)->data;
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      SUPERLU_FREE((void*)lu->A_sup.Store);
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
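    /* Merge the diagonal block A (local columns) and the off-diagonal block B
       (columns mapped to global indices through garray) into one local CSR
       matrix with ascending global column indices per row.  An illustrative
       sketch with made-up data: for rstart=4, a row with A-part columns {4,6}
       and B-part global columns {1,9} is emitted as 1,4,6,9 -- the B entries
       below colA_start first, then the whole A part, then the remaining B
       entries. */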
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      if (aj) {
        ajj = aj + ai[i]; /* ptr to the beginning of this row */
      } else {
        ajj = NULL;
      }
      bjj = bj + bi[i];

      /* B part, smaller col index */
      if (aj) {
        colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      } else { /* SuperLU_DIST does not require the matrix to have diagonal entries, hence aj=NULL works */
        colA_start = rstart;
      }
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif
  }

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->errortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);
      } else if (sinfo > lu->A_sup.ncol) {
        /* sinfo is the number of bytes allocated when the memory allocation
           failure occurred, plus A->ncol. */
        F->errortype = MAT_FACTOR_OUTMEMORY;
        PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    MatDestroy(&A_seq);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this function only. */
  return(0);
}

/* Note: the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERSUPERLU_DIST;
  return(0);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"LargeDiag","NATURAL"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};
  PetscBool              set;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,NULL);
  MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;
  B->ops->getdiagonal      = MatGetDiagonal_SuperLU_DIST;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);

  PetscNewLog(B,&lu);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; -- holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  /* Default number of process columns and rows: pick the most nearly square
     nprow x npcol grid with nprow*npcol == size, favoring npcol <= nprow */
  lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = (int_t) (size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
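  /* Worked example of the loop above (hand-checked): size=6 gives
     npcol=(int_t)(0.5+sqrt(6))=2 and nprow=3, i.e. a 3 x 2 grid; a prime size
     such as 7 falls through to npcol=1, nprow=7. */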

  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
  PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
  PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && !flg) options.Equil = NO;

  PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
  if (set && flg && size>1) {
    if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_INFO)
      PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");
#endif
    } else {
#if defined(PETSC_HAVE_PARMETIS)
      options.ParSymbFact = YES;
      options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
    }
  }

  lu->FactPattern = SamePattern;
  PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE ,&flg,&set);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void)
{
  MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,  MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,  MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  return(0);
}

PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);

  options = lu->options;
  PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
  PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
  PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);
  PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
  PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
  PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");

  switch (options.ColPerm) {
  case NATURAL:
    PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");
    break;
  case MMD_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");
    break;
  case MMD_ATA:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");
    break;
  case METIS_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");
    break;
  case PARMETIS:
    PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

  if (lu->FactPattern == SamePattern) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");
  } else {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");
  }
  return(0);
}

PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      MatFactorInfo_SuperLU_DIST(A,viewer);
    }
  }
  return(0);
}


/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_package superlu_dist to use this direct solver (see the examples below)

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode; GLOBAL stores the whole matrix on process 0, DISTRIBUTED (the default for more than one process) spreads it across the communicator
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern,SamePattern_SameRowPerm> - sparsity pattern reuse for repeated factorizations
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information
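
   A typical invocation (a sketch; the executable and its other options are placeholders):
.vb
   mpiexec -n 4 ./myapp -ksp_type preonly -pc_type lu \
           -pc_factor_mat_solver_package superlu_dist \
           -mat_superlu_dist_colperm PARMETIS -mat_superlu_dist_statprint
.ve

   Or, equivalently, from code before the solve (minimal sketch):
.vb
   PCSetType(pc,PCLU);
   PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST);
.ve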

   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverPackage(), MatSolverPackage

M*/