Actual source code: superlu_dist.c

petsc-3.8.4 2018-03-24

/*
        Provides an interface to the SuperLU_DIST sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on Cray */
#include <stdlib.h>
#endif

#if defined(PETSC_USE_64BIT_INDICES)
/* ugly SuperLU_DIST variable telling it to use long long int */
#define _LONGINT
#endif

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#else
#include <superlu_ddefs.h>
#endif
EXTERN_C_END

/*
    GLOBAL - The sparse matrix and right-hand side are initially stored entirely on process 0 (should really be called centralized).
    DISTRIBUTED - The sparse matrix and right-hand side are initially distributed across the entire MPI communicator.
*/
typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
const char *SuperLU_MatInputModes[] = {"GLOBAL","DISTRIBUTED","SuperLU_MatInputMode","PETSC_",0};

typedef struct {
  int_t                  nprow,npcol,*row,*col;
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SuperLU_MatInputMode   MatInputMode;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist;  /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;


PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
{
  Mat_SuperLU_DIST  *lu= (Mat_SuperLU_DIST*)F->data;

#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
#else
  PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
#endif
  return(0);
}

PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
{
  PetscErrorCode    ierr;

  PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));
  return(0);
}
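/*
   Editorial usage sketch (not part of the original source): one way a caller might
   drive MatSuperluDistGetDiagU() at the Mat level. The helper name is hypothetical,
   and passing NULL row/column orderings is an assumption of the sketch (this
   package ignores the orderings, see MatLUFactorSymbolic_SuperLU_DIST below).
*/
static PetscErrorCode ExampleUsage_GetDiagU(Mat A)
{
  Mat            F;        /* holds the SuperLU_DIST factorization */
  MatFactorInfo  info;
  PetscScalar    *diagU;
  PetscInt       N;
  PetscErrorCode ierr;

  ierr = MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
  ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
  ierr = MatLUFactorSymbolic(F,A,NULL,NULL,&info);CHKERRQ(ierr); /* r,c orderings are ignored by this package */
  ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
  ierr = MatGetSize(A,&N,NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(N,&diagU);CHKERRQ(ierr);       /* pzGetDiagU/pdGetDiagU fill N entries */
  ierr = MatSuperluDistGetDiagU(F,diagU);CHKERRQ(ierr);
  /* ... use diagU, e.g. to inspect the diagonal of the U factor ... */
  ierr = PetscFree(diagU);CHKERRQ(ierr);
  ierr = MatDestroy(&F);CHKERRQ(ierr);
  return(0);
}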

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
    } else {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
        PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      }
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid. */
    PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    MPI_Comm_free(&(lu->comm_superlu));
  }
  PetscFree(A->data);
  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);

  return(0);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info; /* SuperLU_DIST info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscCitationsRegister("@article{lidemmel03,\n  author = {Xiaoye S. Li and James W. Demmel},\n  title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n           Solver for Unsymmetric Linear Systems},\n  journal = {ACM Trans. Mathematical Software},\n  volume = {29},\n  number = {2},\n  pages = {110-140},\n  year = 2003\n}\n",&cite);

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* global mat input, convert b to x_seq */
    VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);
    ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);
    VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);
    ISDestroy(&iden);

    VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&bptr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    VecCopy(b_mpi,x);
    VecGetArray(x,&bptr);
  }

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid);      /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) {
    /* convert seq x to mpi x */
    VecRestoreArray(x_seq,&bptr);
    VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterDestroy(&scat);
    VecDestroy(&x_seq);
  } else {
    VecRestoreArray(x,&bptr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  return(0);
}
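/*
   Editorial usage sketch (not part of the original source): repeated solves with
   the same factored matrix F reuse the SOLVEstruct communication pattern that the
   first MatSolve() call sets up. F, b, and x are assumed to come from the caller,
   with b and x laid out compatibly with the original matrix.
*/
static PetscErrorCode ExampleUsage_RepeatedSolves(Mat F,Vec b,Vec x)
{
  PetscErrorCode ierr;
  PetscInt       i;

  for (i=0; i<10; i++) {
    /* the first call initializes lu->SOLVEstruct; later calls reuse it */
    ierr = MatSolve(F,b,x);CHKERRQ(ierr);
  }
  return(0);
}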

static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_DIST info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* the communication pattern of SOLVEstruct was most likely created for a single
       right-hand side, not for MatMatSolve(); destroy it and create a new SOLVEstruct,
       otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);

  MatGetSize(B_mpi,NULL,&nrhs);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));        /* Initialize the statistics variables. */
  MatDenseGetArray(X,&bptr);
  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs,&lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid, &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  MatDenseRestoreArray(X,&bptr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  return(0);
}
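/*
   Editorial usage sketch (not part of the original source): solving for many
   right-hand sides at once with dense B and X, as MatMatSolve() above requires.
   The helper name and nrhs parameter are hypothetical; sizes are assumed to
   match the factored matrix A.
*/
static PetscErrorCode ExampleUsage_MatMatSolve(Mat F,Mat A,PetscInt nrhs)
{
  Mat            B,X;
  PetscInt       m,n,M,N;
  PetscErrorCode ierr;

  ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr);
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  /* B and X must be MATDENSE; X is overwritten with the solutions */
  ierr = MatCreateDense(PetscObjectComm((PetscObject)A),m,PETSC_DECIDE,M,nrhs,NULL,&B);CHKERRQ(ierr);
  ierr = MatCreateDense(PetscObjectComm((PetscObject)A),m,PETSC_DECIDE,M,nrhs,NULL,&X);CHKERRQ(ierr);
  /* ... fill B with the right-hand sides ... */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatMatSolve(F,B,X);CHKERRQ(ierr);
  ierr = MatDestroy(&B);CHKERRQ(ierr);
  ierr = MatDestroy(&X);CHKERRQ(ierr);
  return(0);
}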


static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->data;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n, colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;   /* SuperLU_DIST info flag is always an int, even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
      MatCreateSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
      ISDestroy(&isrow);

      A_seq = *tseq;
      PetscFree(tseq);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa =  (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc compressed-row (NR) matrix to SuperLU_DIST compressed-column (NC).
       Note: the memory for lu->val, lu->col, and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val, &lu->col, &lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa=(Mat_SeqAIJ*)(mat->A)->data;
    bb=(Mat_SeqAIJ*)(mat->B)->data;
    ai=aa->i; aj=aa->j;
    bi=bb->i; bj=bb->j;
#if defined(PETSC_USE_COMPLEX)
    av=(doublecomplex*)aa->a;
    bv=(doublecomplex*)bb->a;
#else
    av=aa->a;
    bv=bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else if (lu->FactPattern == SamePattern) {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate L and U matrices. */
        lu->options.Fact = SamePattern;
      } else if (lu->FactPattern == DOFACT) {
        PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = DOFACT;

#if defined(PETSC_USE_COMPLEX)
        PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
        PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
      } else {
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"FactPattern must be one of SamePattern, SamePattern_SameRowPerm, or DOFACT");
      }
    }
    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      if (aj) {
        ajj = aj + ai[i]; /* ptr to the beginning of this row */
      } else {
        ajj = NULL;
      }
      bjj = bj + bi[i];

      /* B part, smaller col index */
      if (aj) {
        colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      } else { /* SuperLU_DIST does not require the matrix to have diagonal entries, so aj=NULL also works */
        colA_start = rstart;
      }
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;

    if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
      PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart,lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
    }
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat));   /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0,&lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid,&lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif
  }

  if (sinfo > 0) {
    if (A->erroriffailure) {
      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %D",sinfo);
    } else {
      if (sinfo <= lu->A_sup.ncol) {
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        PetscInfo1(F,"U(i,i) is exactly zero, i= %D\n",sinfo);
      } else if (sinfo > lu->A_sup.ncol) {
        /* sinfo = the number of bytes allocated when the memory allocation failure occurred, plus A->ncol */
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
        PetscInfo1(F,"Number of bytes allocated when memory allocation fails %D\n",sinfo);
      }
    }
  } else if (sinfo < 0) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %D, argument in p*gssvx() had an illegal value", sinfo);
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    MatDestroy(&A_seq);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid);  /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;
  lu->options.Fact  = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  return(0);
}
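/*
   Editorial sketch (not part of the original source) of the column-merge logic in
   the distributed branch above: within each local row, the off-diagonal (B) block
   entries whose global column index is not larger than the first diagonal-block
   column come first, then the diagonal (A) block shifted by rstart, then the
   remaining B entries. Plain C with hypothetical inputs, using int instead of
   int_t and merging only the column indices; the values are interleaved the same way.
*/
static void ExampleMergeRow(int countA,const int *ajj,int rstart,int countB,const int *bjj,const int *garray,int *cols,int *nz)
{
  int j,jB,colA_start = countA ? rstart + ajj[0] : rstart;

  for (j=0; j<countB; j++) {                                /* B part, smaller col index */
    if (garray[bjj[j]] > colA_start) break;
    cols[(*nz)++] = garray[bjj[j]];
  }
  jB = j;                                                   /* where the larger B columns resume */
  for (j=0; j<countA; j++) cols[(*nz)++] = rstart + ajj[j]; /* A part */
  for (j=jB; j<countB; j++) cols[(*nz)++] = garray[bjj[j]]; /* B part, larger col index */
}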

/* Note: the PETSc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscInt         M   = A->rmap->N,N=A->cmap->N;

  /* Initialize the SuperLU process grid. */
  PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));

  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  return(0);
}
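/*
   Editorial usage sketch (not part of the original source): once the symbolic
   phase has set up the process grid, a matrix whose nonzero pattern is unchanged
   can be refactored numerically in place, honoring the -mat_superlu_dist_fact
   choice (SamePattern, SamePattern_SameRowPerm, or DOFACT) handled in
   MatLUFactorNumeric_SuperLU_DIST() above. The helper name is hypothetical.
*/
static PetscErrorCode ExampleUsage_Refactor(Mat F,Mat A,MatFactorInfo *info)
{
  PetscErrorCode ierr;

  /* ... change the numerical values of A, keeping its nonzero pattern ... */
  ierr = MatLUFactorNumeric(F,A,info);CHKERRQ(ierr);
  return(0);
}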

static PetscErrorCode MatFactorGetSolverPackage_aij_superlu_dist(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERSUPERLU_DIST;
  return(0);
}

static PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->data;
  superlu_dist_options_t options;
  PetscErrorCode         ierr;

  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) return(0);

  options = lu->options;
  PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
  PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
  PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);
  PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
  PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
  PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");

  switch (options.ColPerm) {
  case NATURAL:
    PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");
    break;
  case MMD_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");
    break;
  case MMD_ATA:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");
    break;
  case METIS_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");
    break;
  case PARMETIS:
    PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

  if (lu->FactPattern == SamePattern) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");
  } else if (lu->FactPattern == DOFACT) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization DOFACT\n");
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
  }
  return(0);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      MatFactorInfo_SuperLU_DIST(A,viewer);
    }
  }
  return(0);
}
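/*
   Editorial note (not part of the original source): the run parameters printed by
   MatFactorInfo_SuperLU_DIST() appear when the factored matrix is viewed with the
   ASCII info format, which typically surfaces through KSP/PC viewing, e.g. the
   assumed invocation (hypothetical executable name):

     mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_package superlu_dist -ksp_view
*/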

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscErrorCode         ierr;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"LargeDiag","NATURAL"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
  PetscBool              set;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
  PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);
  MatSetUp(B);
  B->ops->getinfo          = MatGetInfo_External;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);

  PetscNewLog(B,&lu);
  B->data = lu;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; (holds the communication pattern used by MatSolve() and MatMatSolve())
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
  */
  set_default_options_dist(&options);

  MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->comm_superlu));
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  /* Default number of process rows and columns: the largest npcol <= sqrt(size) with nprow*npcol == size */
  lu->npcol = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
  if (!lu->npcol) lu->npcol = 1;
  while (lu->npcol > 0) {
    lu->nprow = (int_t) (size/lu->npcol);
    if (size == lu->nprow * lu->npcol) break;
    lu->npcol--;
  }

  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
  PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
  PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
  if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);

  lu->MatInputMode = DISTRIBUTED;

  PetscOptionsEnum("-mat_superlu_dist_matinput","Matrix input mode (global or distributed)","None",SuperLU_MatInputModes,(PetscEnum)lu->MatInputMode,(PetscEnum*)&lu->MatInputMode,NULL);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && !flg) options.Equil = NO;

  PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,2,rowperm[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS;   /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
  if (set && flg && size>1) {
    if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_INFO)
      PetscInfo(A,"Warning: '-mat_superlu_dist_parsymbfact' is ignored because MatInputMode=GLOBAL\n");
#endif
    } else {
#if defined(PETSC_HAVE_PARMETIS)
      options.ParSymbFact = YES;
      options.ColPerm     = PARMETIS;   /* in v2.2, PARMETIS is forced for ParSymbFact regardless of the user ordering setting */
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
    }
  }

  lu->FactPattern = SamePattern;
  PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE,&flg,&set);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else options.PrintStat = NO;
  PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_aij_superlu_dist);
  PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);

  *F = B;
  return(0);
}
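/*
   Editorial usage sketch (not part of the original source): the options read above
   take effect at run time. An assumed invocation (hypothetical executable name)
   selecting a 2 x 3 process grid and the ParMETIS ordering:

     mpiexec -n 6 ./app -pc_type lu -pc_factor_mat_solver_package superlu_dist \
       -mat_superlu_dist_r 2 -mat_superlu_dist_c 3 -mat_superlu_dist_colperm PARMETIS

   nprow*npcol must equal the communicator size, or the SETERRQ3 above is triggered.
*/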

PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_SuperLU_DIST(void)
{
  MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverPackageRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  return(0);
}

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_package superlu_dist to use this direct solver

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <GLOBAL,DISTRIBUTED> - matrix input mode
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm DOFACT
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

   Level: beginner

.seealso: PCLU, PCFactorSetMatSolverPackage(), MatSolverPackage

M*/