Actual source code: superlu_dist.c

/*
        Provides an interface to the SuperLU_DIST_2.2 sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around weird problem with SuperLU on cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_Dist variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL - The sparse matrix and right-hand side are initially stored entirely on process 0; this mode would more accurately be called "centralized".
    DISTRIBUTED - The sparse matrix and right-hand side are initially stored across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};
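/* The input mode is selected at runtime with -mat_superlu_dist_matinput (see
   MatGetFactor_aij_superlu_dist() below); on a single process DISTRIBUTED is
   collapsed to GLOBAL, since the two modes coincide there. */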

typedef struct {
  int_t                nprow,npcol,*row,*col;
  gridinfo_t           grid;
  superlu_options_t    options;
  SuperMatrix          A_sup;
  ScalePermstruct_t    ScalePermstruct;
  LUstruct_t           LUstruct;
  int                  StatPrint;
  SuperLU_MatInputMode MatInputMode;
  SOLVEstruct_t        SOLVEstruct;
  fact_t               FactPattern;
  MPI_Comm             comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex        *val;
#else
  double               *val;
#endif
  PetscBool            matsolve_iscalled,matmatsolve_iscalled;
  PetscBool            CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

extern PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_SuperLU_DIST(Mat);
extern PetscErrorCode MatView_SuperLU_DIST(Mat,PetscViewer);
extern PetscErrorCode MatSolve_SuperLU_DIST(Mat,Vec,Vec);
extern PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatDestroy_MPIAIJ(Mat);

PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    MPI_Comm_free(&(lu->comm_superlu));
  }
  PetscFree(A->spptr);

  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
  if (flg) {
    MatDestroy_SeqAIJ(A);
  } else {
    MatDestroy_MPIAIJ(A);
  }
  return(0);
}

PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);
    ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);
    VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);
    ISDestroy(&iden);

    VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&bptr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    VecCopy(b_mpi,x);
    VecGetArray(x,&bptr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    VecRestoreArray(x_seq,&bptr);
    VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterDestroy(&scat);
    VecDestroy(&x_seq);
  } else {
    VecRestoreArray(x,&bptr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  return(0);
}

PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was likely created for a
       single-rhs MatSolve(), not for MatMatSolve(), so destroy it and create
       a new SOLVEstruct; reusing it may result in memory corruption or an
       incorrect solution. See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);

  MatGetSize(B_mpi,NULL,&nrhs);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  MatDenseGetArray(X,&bptr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);
  MatDenseRestoreArray(X,&bptr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  return(0);
}


PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;   /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
      MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
      ISDestroy(&isrow);

      A_seq = *tseq;
      PetscFree(tseq);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC) format.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;
    bb=(Mat_SeqAIJ*)(mat->B)->data;
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to a crash! However, see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }
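    /* Merge the MPIAIJ storage into a single local CSR array with global column
       indices. Each local row is split between the diagonal block A (local
       column indices, offset by rstart) and the off-diagonal block B (compacted
       column indices that garray maps back to global). Since both parts are
       column-sorted, each output row is assembled in ascending global column
       order: B entries whose global column precedes the first A column, then
       all of A, then the remaining B entries. */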
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx_ABglobal fails, info: %d\n",sinfo);
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx_ABglobal fails, info: %d\n",sinfo);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    MatDestroy(&A_seq);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  return(0);
}

/* Note: the PETSc row (r) and column (c) permutations are ignored; SuperLU_DIST
   computes its own orderings, controlled by options.RowPerm and options.ColPerm,
   during the first numeric factorization. */
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->spptr;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(M, N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  return(0);
}

PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERSUPERLU_DIST;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscBool         flg;
  const char        *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char        *rowperm[]     = {"LargeDiag","NATURAL"};
  const char        *factPattern[] = {"SamePattern","SamePattern_SameRowPerm"};

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,NULL);
  MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);

  B->factortype = MAT_FACTOR_LU;

  PetscNewLog(B,&lu);
  B->spptr = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; - holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  /* Default num of process columns and rows */
  lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = (int_t) (size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }
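  /* The loop starts npcol near sqrt(size) and decreases it until it evenly
     divides size. Example: size=6 gives npcol=2 and nprow=3 (a 3x2 grid); a
     prime size such as 7 falls through to the trivial 7x1 grid. */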

  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
  PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
  PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);
  if (!flg) options.Equil = NO;

  PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);
  if (!flg) options.ReplaceTinyPivot = NO;

  options.ParSymbFact = NO;

  PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,0);
  if (flg) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    PetscPrintf(PetscObjectComm((PetscObject)A),"parsymbfact needs PARMETIS\n");
#endif
  }

  lu->FactPattern = SamePattern_SameRowPerm;

  PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,2,factPattern[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    }
  }
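  /* SamePattern_SameRowPerm reuses both the sparsity pattern and the row
     permutation across repeated factorizations, which is cheaper but assumes
     the new matrix has similar numerical values; SamePattern reuses only the
     pattern, so the L and U storage is destroyed and the row permutation is
     recomputed in MatLUFactorNumeric_SuperLU_DIST(). */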

  options.IterRefine = NOREFINE;
  PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);
  if (flg) options.IterRefine = SLU_DOUBLE;

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",
                          (PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,0);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  MatGetFactor_aij_superlu_dist(A,ftype,F);
  return(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  MatGetFactor_aij_superlu_dist(A,ftype,F);
  return(0);
}

PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);

  options = lu->options;
  PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
  PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
  PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);
  PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
  PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
  PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");

  switch (options.ColPerm) {
  case NATURAL:
    PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");
    break;
  case MMD_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");
    break;
  case MMD_ATA:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");
    break;
  case METIS_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");
    break;
  case PARMETIS:
    PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

  if (lu->FactPattern == SamePattern) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");
  } else {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");
  }
  return(0);
}

PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      MatFactorInfo_SuperLU_DIST(A,viewer);
    }
  }
  return(0);
}


/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

   Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern,SamePattern_SameRowPerm> - sparsity pattern reuse for repeated factorization
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information
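
   A typical invocation from the options database (a sketch; the executable name here is hypothetical):
.vb
   mpiexec -n 4 ./myapp -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package superlu_dist
.ve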

   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverPackage(), MatSolverPackage

M*/