Actual source code: mumps.c
petsc-3.8.4 2018-03-24
2: /*
3: Provides an interface to the MUMPS sparse solver
4: */
6: #include <../src/mat/impls/aij/mpi/mpiaij.h>
7: #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
9: EXTERN_C_BEGIN
10: #if defined(PETSC_USE_COMPLEX)
11: #if defined(PETSC_USE_REAL_SINGLE)
12: #include <cmumps_c.h>
13: #else
14: #include <zmumps_c.h>
15: #endif
16: #else
17: #if defined(PETSC_USE_REAL_SINGLE)
18: #include <smumps_c.h>
19: #else
20: #include <dmumps_c.h>
21: #endif
22: #endif
23: EXTERN_C_END
24: #define JOB_INIT -1
25: #define JOB_FACTSYMBOLIC 1
26: #define JOB_FACTNUMERIC 2
27: #define JOB_SOLVE 3
28: #define JOB_END -2
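/* A MUMPS instance driven by this interface walks through these jobs in order:
   JOB_INIT in PetscInitializeMUMPS(), JOB_FACTSYMBOLIC in the *FactorSymbolic*
   routines, JOB_FACTNUMERIC in MatFactorNumeric_MUMPS(), JOB_SOLVE in the
   solve routines, and finally JOB_END in MatDestroy_MUMPS() */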
30: /* calls to MUMPS */
31: #if defined(PETSC_USE_COMPLEX)
32: #if defined(PETSC_USE_REAL_SINGLE)
33: #define PetscMUMPS_c cmumps_c
34: #else
35: #define PetscMUMPS_c zmumps_c
36: #endif
37: #else
38: #if defined(PETSC_USE_REAL_SINGLE)
39: #define PetscMUMPS_c smumps_c
40: #else
41: #define PetscMUMPS_c dmumps_c
42: #endif
43: #endif
45: /* define MumpsScalar */
46: #if defined(PETSC_USE_COMPLEX)
47: #if defined(PETSC_USE_REAL_SINGLE)
48: #define MumpsScalar mumps_complex
49: #else
50: #define MumpsScalar mumps_double_complex
51: #endif
52: #else
53: #define MumpsScalar PetscScalar
54: #endif
56: /* macros so that indices match the 1-based MUMPS documentation */
57: #define ICNTL(I) icntl[(I)-1]
58: #define CNTL(I) cntl[(I)-1]
59: #define INFOG(I) infog[(I)-1]
60: #define INFO(I) info[(I)-1]
61: #define RINFOG(I) rinfog[(I)-1]
62: #define RINFO(I) rinfo[(I)-1]
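/* For example, a 1-based index from the MUMPS manual maps directly onto the
   0-based C arrays of the MUMPS struct:
       mumps->id.ICNTL(4) = 2;    expands to    mumps->id.icntl[3] = 2;    */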
64: typedef struct {
65: #if defined(PETSC_USE_COMPLEX)
66: #if defined(PETSC_USE_REAL_SINGLE)
67: CMUMPS_STRUC_C id;
68: #else
69: ZMUMPS_STRUC_C id;
70: #endif
71: #else
72: #if defined(PETSC_USE_REAL_SINGLE)
73: SMUMPS_STRUC_C id;
74: #else
75: DMUMPS_STRUC_C id;
76: #endif
77: #endif
79: MatStructure matstruc;
80: PetscMPIInt myid,size;
81: PetscInt *irn,*jcn,nz,sym;
82: PetscScalar *val;
83: MPI_Comm comm_mumps;
84: PetscBool isAIJ;
85: PetscInt ICNTL9_pre; /* check if ICNTL(9) is changed from previous MatSolve */
86: VecScatter scat_rhs, scat_sol; /* used by MatSolve() */
87: Vec b_seq,x_seq;
88: PetscInt ninfo,*info; /* display INFO */
89: PetscInt sizeredrhs;
90: PetscScalar *schur_sol;
91: PetscInt schur_sizesol;
93: PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);
94: } Mat_MUMPS;
96: extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
98: static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
99: {
103: PetscFree2(mumps->id.listvar_schur,mumps->id.schur);
104: PetscFree(mumps->id.redrhs);
105: PetscFree(mumps->schur_sol);
106: mumps->id.size_schur = 0;
107: mumps->id.schur_lld = 0;
108: mumps->id.ICNTL(19) = 0;
109: return(0);
110: }
112: /* solve with rhs in mumps->id.redrhs and return in the same location */
113: static PetscErrorCode MatMumpsSolveSchur_Private(Mat F)
114: {
115: Mat_MUMPS *mumps=(Mat_MUMPS*)F->data;
116: Mat S,B,X;
117: MatFactorSchurStatus schurstatus;
118: PetscInt sizesol;
119: PetscErrorCode ierr;
122: MatFactorFactorizeSchurComplement(F);
123: MatFactorGetSchurComplement(F,&S,&schurstatus);
124: MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&B);
125: switch (schurstatus) {
126: case MAT_FACTOR_SCHUR_FACTORED:
127: MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,(PetscScalar*)mumps->id.redrhs,&X);
128: if (!mumps->id.ICNTL(9)) { /* transpose solve */
129: MatMatSolveTranspose(S,B,X);
130: } else {
131: MatMatSolve(S,B,X);
132: }
133: break;
134: case MAT_FACTOR_SCHUR_INVERTED:
135: sizesol = mumps->id.nrhs*mumps->id.size_schur;
136: if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
137: PetscFree(mumps->schur_sol);
138: PetscMalloc1(sizesol,&mumps->schur_sol);
139: mumps->schur_sizesol = sizesol;
140: }
141: MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.nrhs,mumps->schur_sol,&X);
142: if (!mumps->id.ICNTL(9)) { /* transpose solve */
143: MatTransposeMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);
144: } else {
145: MatMatMult(S,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&X);
146: }
147: MatCopy(X,B,SAME_NONZERO_PATTERN);
148: break;
149: default:
150: SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status);
151: break;
152: }
153: MatFactorRestoreSchurComplement(F,&S,schurstatus);
154: MatDestroy(&B);
155: MatDestroy(&X);
156: return(0);
157: }
159: static PetscErrorCode MatMumpsHandleSchur_Private(Mat F, PetscBool expansion)
160: {
161: Mat_MUMPS *mumps=(Mat_MUMPS*)F->data;
165: if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */
166: return(0);
167: }
168: if (!expansion) { /* prepare for the condensation step */
169: PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
170: /* allocate MUMPS internal array to store reduced right-hand sides */
171: if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
172: PetscFree(mumps->id.redrhs);
173: mumps->id.lredrhs = mumps->id.size_schur;
174: PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);
175: mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
176: }
177: mumps->id.ICNTL(26) = 1; /* condensation phase */
178: } else { /* prepare for the expansion step */
179: /* solve Schur complement (this has to be done by the MUMPS user, so basically us) */
180: MatMumpsSolveSchur_Private(F);
181: mumps->id.ICNTL(26) = 2; /* expansion phase */
182: PetscMUMPS_c(&mumps->id);
183: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
184: /* restore defaults */
185: mumps->id.ICNTL(26) = -1;
186: /* free MUMPS internal array for redrhs if we have solved for multiple rhs in order to save memory space */
187: if (mumps->id.nrhs > 1) {
188: PetscFree(mumps->id.redrhs);
189: mumps->id.lredrhs = 0;
190: mumps->sizeredrhs = 0;
191: }
192: }
193: return(0);
194: }
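/* In summary: with ICNTL(26)=1 the MUMPS solve condenses the rhs onto the
   Schur unknowns (into id.redrhs), MatMumpsSolveSchur_Private() then solves
   the Schur system on the PETSc side, and with ICNTL(26)=2 a second MUMPS
   solve expands that partial solution back to the full system */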
196: /*
197: MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz]
199: input:
200: A - matrix in aij,baij or sbaij (bs=1) format
201: shift - 0: C style output triple; 1: Fortran style output triple.
202: reuse - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
203: MAT_REUSE_MATRIX: only the values in v array are updated
204: output:
205: nnz - dim of r, c, and v (number of local nonzero entries of A)
206: r, c, v - row and col index, matrix values (matrix triples)
208: The returned values r, c, and sometimes v are obtained in a single PetscMalloc(); in MatDestroy_MUMPS() they are
209: freed with PetscFree(mumps->irn). This is not ideal code: the fact that v is only SOMETIMES part of the mumps->irn
210: allocation means that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
212: */
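/* An illustrative example of the triple format with shift=1 (Fortran style),
   as produced by the routines below for MUMPS:

       A = [ 2  0 -1 ]          nnz = 4
           [ 0  3  0 ]          r = {1, 1, 2, 3}
           [ 0  0  4 ]          c = {1, 3, 2, 3}
                                v = {2, -1, 3, 4}
*/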
214: PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
215: {
216: const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
217: PetscInt nz,rnz,i,j;
219: PetscInt *row,*col;
220: Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data;
223: *v=aa->a;
224: if (reuse == MAT_INITIAL_MATRIX) {
225: nz = aa->nz;
226: ai = aa->i;
227: aj = aa->j;
228: *nnz = nz;
229: PetscMalloc1(2*nz, &row);
230: col = row + nz;
232: nz = 0;
233: for (i=0; i<M; i++) {
234: rnz = ai[i+1] - ai[i];
235: ajj = aj + ai[i];
236: for (j=0; j<rnz; j++) {
237: row[nz] = i+shift; col[nz++] = ajj[j] + shift;
238: }
239: }
240: *r = row; *c = col;
241: }
242: return(0);
243: }
245: PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
246: {
247: Mat_SeqBAIJ *aa=(Mat_SeqBAIJ*)A->data;
248: const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
249: PetscInt bs,M,nz,idx=0,rnz,i,j,k,m;
251: PetscInt *row,*col;
254: MatGetBlockSize(A,&bs);
255: M = A->rmap->N/bs;
256: *v = aa->a;
257: if (reuse == MAT_INITIAL_MATRIX) {
258: ai = aa->i; aj = aa->j;
259: nz = bs2*aa->nz;
260: *nnz = nz;
261: PetscMalloc1(2*nz, &row);
262: col = row + nz;
264: for (i=0; i<M; i++) {
265: ajj = aj + ai[i];
266: rnz = ai[i+1] - ai[i];
267: for (k=0; k<rnz; k++) {
268: for (j=0; j<bs; j++) {
269: for (m=0; m<bs; m++) {
270: row[idx] = i*bs + m + shift;
271: col[idx++] = bs*(ajj[k]) + j + shift;
272: }
273: }
274: }
275: }
276: *r = row; *c = col;
277: }
278: return(0);
279: }
281: PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
282: {
283: const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
284: PetscInt nz,rnz,i,j;
286: PetscInt *row,*col;
287: Mat_SeqSBAIJ *aa=(Mat_SeqSBAIJ*)A->data;
290: *v = aa->a;
291: if (reuse == MAT_INITIAL_MATRIX) {
292: nz = aa->nz;
293: ai = aa->i;
294: aj = aa->j;
295: *v = aa->a;
296: *nnz = nz;
297: PetscMalloc1(2*nz, &row);
298: col = row + nz;
300: nz = 0;
301: for (i=0; i<M; i++) {
302: rnz = ai[i+1] - ai[i];
303: ajj = aj + ai[i];
304: for (j=0; j<rnz; j++) {
305: row[nz] = i+shift; col[nz++] = ajj[j] + shift;
306: }
307: }
308: *r = row; *c = col;
309: }
310: return(0);
311: }
313: PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
314: {
315: const PetscInt *ai,*aj,*ajj,*adiag,M=A->rmap->n;
316: PetscInt nz,rnz,i,j;
317: const PetscScalar *av,*v1;
318: PetscScalar *val;
319: PetscErrorCode ierr;
320: PetscInt *row,*col;
321: Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data;
322: PetscBool missing;
325: ai =aa->i; aj=aa->j;av=aa->a;
326: adiag=aa->diag;
327: MatMissingDiagonal_SeqAIJ(A,&missing,&i);
328: if (reuse == MAT_INITIAL_MATRIX) {
329: /* count nz in the upper triangular part of A */
330: nz = 0;
331: if (missing) {
332: for (i=0; i<M; i++) {
333: if (PetscUnlikely(adiag[i] >= ai[i+1])) {
334: for (j=ai[i];j<ai[i+1];j++) {
335: if (aj[j] < i) continue;
336: nz++;
337: }
338: } else {
339: nz += ai[i+1] - adiag[i];
340: }
341: }
342: } else {
343: for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
344: }
345: *nnz = nz;
347: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
348: col = row + nz;
349: val = (PetscScalar*)(col + nz);
351: nz = 0;
352: if (missing) {
353: for (i=0; i<M; i++) {
354: if (PetscUnlikely(adiag[i] >= ai[i+1])) {
355: for (j=ai[i];j<ai[i+1];j++) {
356: if (aj[j] < i) continue;
357: row[nz] = i+shift;
358: col[nz] = aj[j]+shift;
359: val[nz] = av[j];
360: nz++;
361: }
362: } else {
363: rnz = ai[i+1] - adiag[i];
364: ajj = aj + adiag[i];
365: v1 = av + adiag[i];
366: for (j=0; j<rnz; j++) {
367: row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
368: }
369: }
370: }
371: } else {
372: for (i=0; i<M; i++) {
373: rnz = ai[i+1] - adiag[i];
374: ajj = aj + adiag[i];
375: v1 = av + adiag[i];
376: for (j=0; j<rnz; j++) {
377: row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
378: }
379: }
380: }
381: *r = row; *c = col; *v = val;
382: } else {
383: nz = 0; val = *v;
384: if (missing) {
385: for (i=0; i <M; i++) {
386: if (PetscUnlikely(adiag[i] >= ai[i+1])) {
387: for (j=ai[i];j<ai[i+1];j++) {
388: if (aj[j] < i) continue;
389: val[nz++] = av[j];
390: }
391: } else {
392: rnz = ai[i+1] - adiag[i];
393: v1 = av + adiag[i];
394: for (j=0; j<rnz; j++) {
395: val[nz++] = v1[j];
396: }
397: }
398: }
399: } else {
400: for (i=0; i <M; i++) {
401: rnz = ai[i+1] - adiag[i];
402: v1 = av + adiag[i];
403: for (j=0; j<rnz; j++) {
404: val[nz++] = v1[j];
405: }
406: }
407: }
408: }
409: return(0);
410: }
412: PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
413: {
414: const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
415: PetscErrorCode ierr;
416: PetscInt rstart,nz,i,j,jj,irow,countA,countB;
417: PetscInt *row,*col;
418: const PetscScalar *av, *bv,*v1,*v2;
419: PetscScalar *val;
420: Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)A->data;
421: Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)(mat->A)->data;
422: Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data;
425: ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
426: av=aa->a; bv=bb->a;
428: garray = mat->garray;
430: if (reuse == MAT_INITIAL_MATRIX) {
431: nz = aa->nz + bb->nz;
432: *nnz = nz;
433: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
434: col = row + nz;
435: val = (PetscScalar*)(col + nz);
437: *r = row; *c = col; *v = val;
438: } else {
439: row = *r; col = *c; val = *v;
440: }
442: jj = 0; irow = rstart;
443: for (i=0; i<m; i++) {
444: ajj = aj + ai[i]; /* ptr to the beginning of this row */
445: countA = ai[i+1] - ai[i];
446: countB = bi[i+1] - bi[i];
447: bjj = bj + bi[i];
448: v1 = av + ai[i];
449: v2 = bv + bi[i];
451: /* A-part */
452: for (j=0; j<countA; j++) {
453: if (reuse == MAT_INITIAL_MATRIX) {
454: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
455: }
456: val[jj++] = v1[j];
457: }
459: /* B-part */
460: for (j=0; j < countB; j++) {
461: if (reuse == MAT_INITIAL_MATRIX) {
462: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
463: }
464: val[jj++] = v2[j];
465: }
466: irow++;
467: }
468: return(0);
469: }
471: PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
472: {
473: const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
474: PetscErrorCode ierr;
475: PetscInt rstart,nz,i,j,jj,irow,countA,countB;
476: PetscInt *row,*col;
477: const PetscScalar *av, *bv,*v1,*v2;
478: PetscScalar *val;
479: Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
480: Mat_SeqAIJ *aa = (Mat_SeqAIJ*)(mat->A)->data;
481: Mat_SeqAIJ *bb = (Mat_SeqAIJ*)(mat->B)->data;
484: ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
485: av=aa->a; bv=bb->a;
487: garray = mat->garray;
489: if (reuse == MAT_INITIAL_MATRIX) {
490: nz = aa->nz + bb->nz;
491: *nnz = nz;
492: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
493: col = row + nz;
494: val = (PetscScalar*)(col + nz);
496: *r = row; *c = col; *v = val;
497: } else {
498: row = *r; col = *c; val = *v;
499: }
501: jj = 0; irow = rstart;
502: for (i=0; i<m; i++) {
503: ajj = aj + ai[i]; /* ptr to the beginning of this row */
504: countA = ai[i+1] - ai[i];
505: countB = bi[i+1] - bi[i];
506: bjj = bj + bi[i];
507: v1 = av + ai[i];
508: v2 = bv + bi[i];
510: /* A-part */
511: for (j=0; j<countA; j++) {
512: if (reuse == MAT_INITIAL_MATRIX) {
513: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
514: }
515: val[jj++] = v1[j];
516: }
518: /* B-part */
519: for (j=0; j < countB; j++) {
520: if (reuse == MAT_INITIAL_MATRIX) {
521: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
522: }
523: val[jj++] = v2[j];
524: }
525: irow++;
526: }
527: return(0);
528: }
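/* Note on the loop above: for the diagonal block (A-part) the global column is
   rstart plus the local column index, while for the off-diagonal block (B-part)
   the local-to-global map mat->garray supplies the global column directly */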
530: PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
531: {
532: Mat_MPIBAIJ *mat = (Mat_MPIBAIJ*)A->data;
533: Mat_SeqBAIJ *aa = (Mat_SeqBAIJ*)(mat->A)->data;
534: Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data;
535: const PetscInt *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
536: const PetscInt *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
537: const PetscInt bs2=mat->bs2;
538: PetscErrorCode ierr;
539: PetscInt bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
540: PetscInt *row,*col;
541: const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
542: PetscScalar *val;
545: MatGetBlockSize(A,&bs);
546: if (reuse == MAT_INITIAL_MATRIX) {
547: nz = bs2*(aa->nz + bb->nz);
548: *nnz = nz;
549: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
550: col = row + nz;
551: val = (PetscScalar*)(col + nz);
553: *r = row; *c = col; *v = val;
554: } else {
555: row = *r; col = *c; val = *v;
556: }
558: jj = 0; irow = rstart;
559: for (i=0; i<mbs; i++) {
560: countA = ai[i+1] - ai[i];
561: countB = bi[i+1] - bi[i];
562: ajj = aj + ai[i];
563: bjj = bj + bi[i];
564: v1 = av + bs2*ai[i];
565: v2 = bv + bs2*bi[i];
567: idx = 0;
568: /* A-part */
569: for (k=0; k<countA; k++) {
570: for (j=0; j<bs; j++) {
571: for (n=0; n<bs; n++) {
572: if (reuse == MAT_INITIAL_MATRIX) {
573: row[jj] = irow + n + shift;
574: col[jj] = rstart + bs*ajj[k] + j + shift;
575: }
576: val[jj++] = v1[idx++];
577: }
578: }
579: }
581: idx = 0;
582: /* B-part */
583: for (k=0; k<countB; k++) {
584: for (j=0; j<bs; j++) {
585: for (n=0; n<bs; n++) {
586: if (reuse == MAT_INITIAL_MATRIX) {
587: row[jj] = irow + n + shift;
588: col[jj] = bs*garray[bjj[k]] + j + shift;
589: }
590: val[jj++] = v2[idx++];
591: }
592: }
593: }
594: irow += bs;
595: }
596: return(0);
597: }
599: PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
600: {
601: const PetscInt *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
602: PetscErrorCode ierr;
603: PetscInt rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
604: PetscInt *row,*col;
605: const PetscScalar *av, *bv,*v1,*v2;
606: PetscScalar *val;
607: Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
608: Mat_SeqAIJ *aa =(Mat_SeqAIJ*)(mat->A)->data;
609: Mat_SeqAIJ *bb =(Mat_SeqAIJ*)(mat->B)->data;
612: ai=aa->i; aj=aa->j; adiag=aa->diag;
613: bi=bb->i; bj=bb->j; garray = mat->garray;
614: av=aa->a; bv=bb->a;
616: rstart = A->rmap->rstart;
618: if (reuse == MAT_INITIAL_MATRIX) {
619: nza = 0; /* num of upper triangular entries in mat->A, including diagonals */
620: nzb = 0; /* num of upper triangular entries in mat->B */
621: for (i=0; i<m; i++) {
622: nza += (ai[i+1] - adiag[i]);
623: countB = bi[i+1] - bi[i];
624: bjj = bj + bi[i];
625: for (j=0; j<countB; j++) {
626: if (garray[bjj[j]] > rstart) nzb++;
627: }
628: }
630: nz = nza + nzb; /* total nz of upper triangular part of mat */
631: *nnz = nz;
632: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
633: col = row + nz;
634: val = (PetscScalar*)(col + nz);
636: *r = row; *c = col; *v = val;
637: } else {
638: row = *r; col = *c; val = *v;
639: }
641: jj = 0; irow = rstart;
642: for (i=0; i<m; i++) {
643: ajj = aj + adiag[i]; /* ptr to the beginning of the diagonal of this row */
644: v1 = av + adiag[i];
645: countA = ai[i+1] - adiag[i];
646: countB = bi[i+1] - bi[i];
647: bjj = bj + bi[i];
648: v2 = bv + bi[i];
650: /* A-part */
651: for (j=0; j<countA; j++) {
652: if (reuse == MAT_INITIAL_MATRIX) {
653: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
654: }
655: val[jj++] = v1[j];
656: }
658: /* B-part */
659: for (j=0; j < countB; j++) {
660: if (garray[bjj[j]] > rstart) {
661: if (reuse == MAT_INITIAL_MATRIX) {
662: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
663: }
664: val[jj++] = v2[j];
665: }
666: }
667: irow++;
668: }
669: return(0);
670: }
672: PetscErrorCode MatDestroy_MUMPS(Mat A)
673: {
674: Mat_MUMPS *mumps=(Mat_MUMPS*)A->data;
678: PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);
679: VecScatterDestroy(&mumps->scat_rhs);
680: VecScatterDestroy(&mumps->scat_sol);
681: VecDestroy(&mumps->b_seq);
682: VecDestroy(&mumps->x_seq);
683: PetscFree(mumps->id.perm_in);
684: PetscFree(mumps->irn);
685: PetscFree(mumps->info);
686: MatMumpsResetSchur_Private(mumps);
687: mumps->id.job = JOB_END;
688: PetscMUMPS_c(&mumps->id);
689: MPI_Comm_free(&mumps->comm_mumps);
690: PetscFree(A->data);
692: /* clear composed functions */
693: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
694: PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);
695: PetscObjectComposeFunction((PetscObject)A,"MatFactorCreateSchurComplement_C",NULL);
696: PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);
697: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);
698: PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);
699: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);
700: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);
701: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);
702: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);
703: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);
704: return(0);
705: }
707: PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
708: {
709: Mat_MUMPS *mumps=(Mat_MUMPS*)A->data;
710: PetscScalar *array;
711: Vec b_seq;
712: IS is_iden,is_petsc;
713: PetscErrorCode ierr;
714: PetscInt i;
715: PetscBool second_solve = PETSC_FALSE;
716: static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
719: PetscCitationsRegister("@article{MUMPS01,\n author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n journal = {SIAM Journal on Matrix Analysis and Applications},\n volume = {23},\n number = {1},\n pages = {15--41},\n year = {2001}\n}\n",&cite1);
720: PetscCitationsRegister("@article{MUMPS02,\n author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n title = {Hybrid scheduling for the parallel solution of linear systems},\n journal = {Parallel Computing},\n volume = {32},\n number = {2},\n pages = {136--156},\n year = {2006}\n}\n",&cite2);
722: if (A->factorerrortype) {
723: PetscInfo2(A,"MatSolve is called with singular matrix factor, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
724: VecSetInf(x);
725: return(0);
726: }
728: mumps->id.nrhs = 1;
729: b_seq = mumps->b_seq;
730: if (mumps->size > 1) {
731: /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
732: VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);
733: VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);
734: if (!mumps->myid) {VecGetArray(b_seq,&array);}
735: } else { /* size == 1 */
736: VecCopy(b,x);
737: VecGetArray(x,&array);
738: }
739: if (!mumps->myid) { /* define rhs on the host */
740: mumps->id.nrhs = 1;
741: mumps->id.rhs = (MumpsScalar*)array;
742: }
744: /*
745: handle condensation step of Schur complement (if any)
746: We set ICNTL(26) == -1 by default when Schur indices have been provided by the user.
747: According to the MUMPS (5.0.0) manual, any other value could be harmful during the factorization phase.
748: Unless the user provides a valid value for ICNTL(26), the MatSolve and MatMatSolve routines solve the full system.
749: This requires an extra call to PetscMUMPS_c and the computation of the factors of S.
750: */
751: if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
752: if (mumps->size > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported by the PETSc/MUMPS interface\n");
753: second_solve = PETSC_TRUE;
754: MatMumpsHandleSchur_Private(A,PETSC_FALSE);
755: }
756: /* solve phase */
757: /*-------------*/
758: mumps->id.job = JOB_SOLVE;
759: PetscMUMPS_c(&mumps->id);
760: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
762: /* handle expansion step of Schur complement (if any) */
763: if (second_solve) {
764: MatMumpsHandleSchur_Private(A,PETSC_TRUE);
765: }
767: if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
768: if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
769: /* when id.ICNTL(9) changes, the contents of isol_loc may change (though not its size, lsol_loc), so recreate scat_sol */
770: VecScatterDestroy(&mumps->scat_sol);
771: }
772: if (!mumps->scat_sol) { /* create scatter scat_sol */
773: ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden); /* from */
774: for (i=0; i<mumps->id.lsol_loc; i++) {
775: mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
776: }
777: ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc); /* to */
778: VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);
779: ISDestroy(&is_iden);
780: ISDestroy(&is_petsc);
782: mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
783: }
785: VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
786: VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
787: }
788: return(0);
789: }
791: PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
792: {
793: Mat_MUMPS *mumps=(Mat_MUMPS*)A->data;
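  /* MUMPS ICNTL(9) selects the system to solve: 1 (the default) solves A x = b,
     any other value solves A^T x = b -- hence the temporary toggle below */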
797: mumps->id.ICNTL(9) = 0;
798: MatSolve_MUMPS(A,b,x);
799: mumps->id.ICNTL(9) = 1;
800: return(0);
801: }
803: PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
804: {
806: Mat Bt = NULL;
807: PetscBool flg, flgT;
808: Mat_MUMPS *mumps=(Mat_MUMPS*)A->data;
809: PetscInt i,nrhs,M;
810: PetscScalar *array,*bray;
813: PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
814: PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&flgT);
815: if (flgT) {
816: if (mumps->size > 1) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
817: MatTransposeGetMat(B,&Bt);
818: } else {
819: if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
820: }
821: PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
822: if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");
823: if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");
825: MatGetSize(B,&M,&nrhs);
826: mumps->id.nrhs = nrhs;
827: mumps->id.lrhs = M;
829: if (mumps->size == 1) {
830: PetscScalar *aa;
831: PetscInt spnr,*ia,*ja;
832: PetscBool second_solve = PETSC_FALSE;
834: /* copy B to X */
835: MatDenseGetArray(X,&array);
836: mumps->id.rhs = (MumpsScalar*)array;
837: if (!Bt) {
838: MatDenseGetArray(B,&bray);
839: PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));
840: MatDenseRestoreArray(B,&bray);
841: } else {
842: PetscBool done;
844: MatSeqAIJGetArray(Bt,&aa);
845: MatGetRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&done);
846: if (!done) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Cannot get IJ structure");
847: mumps->id.irhs_ptr = ia;
848: mumps->id.irhs_sparse = ja;
849: mumps->id.nz_rhs = ia[spnr] - 1;
850: mumps->id.rhs_sparse = (MumpsScalar*)aa;
851: mumps->id.ICNTL(20) = 1;
852: }
853: /* handle condensation step of Schur complement (if any) */
854: if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
855: second_solve = PETSC_TRUE;
856: MatMumpsHandleSchur_Private(A,PETSC_FALSE);
857: }
858: /* solve phase */
859: /*-------------*/
860: mumps->id.job = JOB_SOLVE;
861: PetscMUMPS_c(&mumps->id);
862: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
864: /* handle expansion step of Schur complement (if any) */
865: if (second_solve) {
866: MatMumpsHandleSchur_Private(A,PETSC_TRUE);
867: }
868: if (Bt) {
869: PetscBool done;
871: MatSeqAIJRestoreArray(Bt,&aa);
872: MatRestoreRowIJ(Bt,1,PETSC_FALSE,PETSC_FALSE,&spnr,(const PetscInt**)&ia,(const PetscInt**)&ja,&done);
873: if (!done) SETERRQ(PetscObjectComm((PetscObject)Bt),PETSC_ERR_ARG_WRONG,"Cannot restore IJ structure");
874: mumps->id.ICNTL(20) = 0;
875: }
876: MatDenseRestoreArray(X,&array);
877: } else { /*--------- parallel case --------*/
878: PetscInt lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
879: MumpsScalar *sol_loc,*sol_loc_save;
880: IS is_to,is_from;
881: PetscInt k,proc,j,m;
882: const PetscInt *rstart;
883: Vec v_mpi,b_seq,x_seq;
884: VecScatter scat_rhs,scat_sol;
886: if (mumps->size > 1 && mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Parallel Schur complements not yet supported by the PETSc/MUMPS interface\n");
888: /* create x_seq to hold local solution */
889: isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
890: sol_loc_save = mumps->id.sol_loc;
892: lsol_loc = mumps->id.INFO(23);
893: nlsol_loc = nrhs*lsol_loc; /* length of sol_loc */
894: PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);
895: mumps->id.sol_loc = (MumpsScalar*)sol_loc;
896: mumps->id.isol_loc = isol_loc;
898: VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);
900: /* copy rhs matrix B into vector v_mpi */
901: MatGetLocalSize(B,&m,NULL);
902: MatDenseGetArray(B,&bray);
903: VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);
904: MatDenseRestoreArray(B,&bray);
906: /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
907: /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
908: iidx: inverse of idx, will be used when scattering x_seq -> X */
909: PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);
910: MatGetOwnershipRanges(B,&rstart);
911: k = 0;
912: for (proc=0; proc<mumps->size; proc++){
913: for (j=0; j<nrhs; j++){
914: for (i=rstart[proc]; i<rstart[proc+1]; i++){
915: iidx[j*M + i] = k;
916: idx[k++] = j*M + i;
917: }
918: }
919: }
921: if (!mumps->myid) {
922: VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);
923: ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);
924: ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);
925: } else {
926: VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);
927: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);
928: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);
929: }
930: VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);
931: VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);
932: ISDestroy(&is_to);
933: ISDestroy(&is_from);
934: VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);
936: if (!mumps->myid) { /* define rhs on the host */
937: VecGetArray(b_seq,&bray);
938: mumps->id.rhs = (MumpsScalar*)bray;
939: VecRestoreArray(b_seq,&bray);
940: }
942: /* solve phase */
943: /*-------------*/
944: mumps->id.job = JOB_SOLVE;
945: PetscMUMPS_c(&mumps->id);
946: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
948: /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
949: MatDenseGetArray(X,&array);
950: VecPlaceArray(v_mpi,array);
952: /* create scatter scat_sol */
953: PetscMalloc1(nlsol_loc,&idxx);
954: ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);
955: for (i=0; i<lsol_loc; i++) {
956: isol_loc[i] -= 1; /* change Fortran style to C style */
957: idxx[i] = iidx[isol_loc[i]];
958: for (j=1; j<nrhs; j++){
959: idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M];
960: }
961: }
962: ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);
963: VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);
964: VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);
965: ISDestroy(&is_from);
966: ISDestroy(&is_to);
967: VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);
968: MatDenseRestoreArray(X,&array);
970: /* free spaces */
971: mumps->id.sol_loc = sol_loc_save;
972: mumps->id.isol_loc = isol_loc_save;
974: PetscFree2(sol_loc,isol_loc);
975: PetscFree2(idx,iidx);
976: PetscFree(idxx);
977: VecDestroy(&x_seq);
978: VecDestroy(&v_mpi);
979: VecDestroy(&b_seq);
980: VecScatterDestroy(&scat_rhs);
981: VecScatterDestroy(&scat_sol);
982: }
983: return(0);
984: }
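/* Illustrative use of the multiple-rhs path above (a sketch, not part of this
   file): F is assumed to be a MUMPS factor of an M x M matrix and nrhs the
   number of right-hand sides, stored as columns of a dense matrix. */
#if 0
  Mat B,X;
  MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,nrhs,NULL,&B);
  MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);
  /* ... fill B with MatSetValues() and assemble it ... */
  MatMatSolve(F,B,X);               /* dispatches to MatMatSolve_MUMPS() above */
  MatDestroy(&B);
  MatDestroy(&X);
#endif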
986: #if !defined(PETSC_USE_COMPLEX)
987: /*
988: input:
989: F: numeric factor
990: output:
991: nneg: total number of negative pivots
992: nzero: total number of zero pivots
993: npos: (global dimension of F) - nneg - nzero
994: */
995: PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
996: {
997: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
999: PetscMPIInt size;
1002: MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);
1003: /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
1004: if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set to 1 for correct global matrix inertia\n",mumps->id.ICNTL(13));
1006: if (nneg) *nneg = mumps->id.INFOG(12);
1007: if (nzero || npos) {
1008: if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set to 1 for null pivot row detection");
1009: if (nzero) *nzero = mumps->id.INFOG(28);
1010: if (npos) *npos = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
1011: }
1012: return(0);
1013: }
1014: #endif
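/* Illustrative inertia query after a Cholesky factorization with MUMPS (a
   sketch, not part of this file; F denotes the factored matrix). Per the
   checks above, run with -mat_mumps_icntl_13 1 in parallel, and with
   -mat_mumps_icntl_24 1 if zero pivots are to be counted. */
#if 0
  PetscInt nneg,nzero,npos;
  MatGetInertia(F,&nneg,&nzero,&npos);   /* reaches MatGetInertia_SBAIJMUMPS() */
#endif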
1016: PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
1017: {
1018: Mat_MUMPS *mumps =(Mat_MUMPS*)(F)->data;
1020: PetscBool isMPIAIJ;
1023: if (mumps->id.INFOG(1) < 0) {
1024: if (mumps->id.INFOG(1) == -6) {
1025: PetscInfo2(A,"MatFactorNumeric is called with singular matrix structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1026: }
1027: PetscInfo2(A,"MatFactorNumeric is called after analysis phase fails, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1028: return(0);
1029: }
1031: (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1033: /* numerical factorization phase */
1034: /*-------------------------------*/
1035: mumps->id.job = JOB_FACTNUMERIC;
1036: if (!mumps->id.ICNTL(18)) { /* A is centralized */
1037: if (!mumps->myid) {
1038: mumps->id.a = (MumpsScalar*)mumps->val;
1039: }
1040: } else {
1041: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1042: }
1043: PetscMUMPS_c(&mumps->id);
1044: if (mumps->id.INFOG(1) < 0) {
1045: if (A->erroriffailure) {
1046: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1047: } else {
1048: if (mumps->id.INFOG(1) == -10) { /* numerically singular matrix */
1049: PetscInfo2(F,"matrix is numerically singular, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1050: F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
1051: } else if (mumps->id.INFOG(1) == -13) {
1052: PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, cannot allocate required memory %d megabytes\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1053: F->factorerrortype = MAT_FACTOR_OUTMEMORY;
1054: } else if (mumps->id.INFOG(1) == -8 || mumps->id.INFOG(1) == -9 || (-16 < mumps->id.INFOG(1) && mumps->id.INFOG(1) < -10) ) {
1055: PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d, problem with workarray \n",mumps->id.INFOG(1),mumps->id.INFO(2));
1056: F->factorerrortype = MAT_FACTOR_OUTMEMORY;
1057: } else {
1058: PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1059: F->factorerrortype = MAT_FACTOR_OTHER;
1060: }
1061: }
1062: }
1063: if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB," mumps->id.ICNTL(16)=%d\n",mumps->id.ICNTL(16));
1065: F->assembled = PETSC_TRUE;
1066: mumps->matstruc = SAME_NONZERO_PATTERN;
1067: if (F->schur) { /* reset Schur status to unfactored */
1068: if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
1069: mumps->id.ICNTL(19) = 2;
1070: MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur);
1071: }
1072: MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED);
1073: }
1075: /* just to be sure that ICNTL(19) value returned by a call from MatMumpsGetIcntl is always consistent */
1076: if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3;
1078: if (mumps->size > 1) {
1079: PetscInt lsol_loc;
1080: PetscScalar *sol_loc;
1082: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
1084: /* distributed solution; Create x_seq=sol_loc for repeated use */
1085: if (mumps->x_seq) {
1086: VecScatterDestroy(&mumps->scat_sol);
1087: PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);
1088: VecDestroy(&mumps->x_seq);
1089: }
1090: lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
1091: PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);
1092: mumps->id.lsol_loc = lsol_loc;
1093: mumps->id.sol_loc = (MumpsScalar*)sol_loc;
1094: VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);
1095: }
1096: return(0);
1097: }
1099: /* Sets MUMPS options from the options database */
1100: PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
1101: {
1102: Mat_MUMPS *mumps = (Mat_MUMPS*)F->data;
1104: PetscInt icntl,info[40],i,ninfo=40;
1105: PetscBool flg;
1108: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");
1109: PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);
1110: if (flg) mumps->id.ICNTL(1) = icntl;
1111: PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);
1112: if (flg) mumps->id.ICNTL(2) = icntl;
1113: PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);
1114: if (flg) mumps->id.ICNTL(3) = icntl;
1116: PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);
1117: if (flg) mumps->id.ICNTL(4) = icntl;
1118: if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* restore MUMPS default id.ICNTL(3) = 6 */
1120: PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);
1121: if (flg) mumps->id.ICNTL(6) = icntl;
1123: PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);
1124: if (flg) {
1125: if (icntl == 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order set by the user in PERM_IN is not supported in parallel by the PETSc/MUMPS interface\n");
1126: else mumps->id.ICNTL(7) = icntl;
1127: }
1129: PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);
1130: /* PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL); handled by MatSolveTranspose_MUMPS() */
1131: PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);
1132: PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);
1133: PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);
1134: PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);
1135: PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);
1136: PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);
1137: if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset Schur data (if any) */
1138: MatDestroy(&F->schur);
1139: MatMumpsResetSchur_Private(mumps);
1140: }
1141: /* PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL); -- sparse rhs is not supported in PETSc API */
1142: /* PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL); we only use distributed solution vector */
1144: PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);
1145: PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can be allocated per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);
1146: PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);
1147: if (mumps->id.ICNTL(24)) {
1148: mumps->id.ICNTL(13) = 1; /* turn-off ScaLAPACK to help with the correct detection of null pivots */
1149: }
1151: PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);
1152: PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase when a Schur complement matrix has been computed","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);
1153: PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);
1154: PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and icntl(7) ordering, or 2 for parallel analysis and icntl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);
1155: PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);
1156: PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);
1157: PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);
1158: /* PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elimination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL);  -- not supported by PETSc API */
1159: PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);
1161: PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);
1162: PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);
1163: PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);
1164: PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);
1165: PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);
1167: PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
1169: PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);
1170: if (ninfo) {
1171: if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must be <= 40\n",ninfo);
1172: PetscMalloc1(ninfo,&mumps->info);
1173: mumps->ninfo = ninfo;
1174: for (i=0; i<ninfo; i++) {
1175: if (info[i] < 1 || info[i] > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must be between 1 and 40\n",info[i]);
1176: else mumps->info[i] = info[i];
1177: }
1178: }
1180: PetscOptionsEnd();
1181: return(0);
1182: }
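/* The options above are given on the command line. An illustrative invocation
   (./app stands in for any PETSc executable; option names are those of
   petsc-3.8):

     ./app -ksp_type preonly -pc_type lu \
           -pc_factor_mat_solver_package mumps \
           -mat_mumps_icntl_4 2 -mat_mumps_cntl_1 0.01
*/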
1184: PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
1185: {
1189: MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
1190: MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);
1191: MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));
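  /* MUMPS is a Fortran library, so it is handed a Fortran handle of this
     private duplicate of A's communicator; duplicating keeps MUMPS's internal
     messages from interfering with PETSc's own communication */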
1193: mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
1195: mumps->id.job = JOB_INIT;
1196: mumps->id.par = 1;  /* host participates in factorization and solve */
1197: mumps->id.sym = mumps->sym;
1198: PetscMUMPS_c(&mumps->id);
1200: mumps->scat_rhs = NULL;
1201: mumps->scat_sol = NULL;
1203: /* set PETSc-MUMPS default options - override MUMPS default */
1204: mumps->id.ICNTL(3) = 0;
1205: mumps->id.ICNTL(4) = 0;
1206: if (mumps->size == 1) {
1207: mumps->id.ICNTL(18) = 0; /* centralized assembled matrix input */
1208: } else {
1209: mumps->id.ICNTL(18) = 3; /* distributed assembled matrix input */
1210: mumps->id.ICNTL(20) = 0; /* rhs is in dense format */
1211: mumps->id.ICNTL(21) = 1; /* distributed solution */
1212: }
1214: /* schur */
1215: mumps->id.size_schur = 0;
1216: mumps->id.listvar_schur = NULL;
1217: mumps->id.schur = NULL;
1218: mumps->sizeredrhs = 0;
1219: mumps->schur_sol = NULL;
1220: mumps->schur_sizesol = 0;
1221: return(0);
1222: }
1224: PetscErrorCode MatFactorSymbolic_MUMPS_ReportIfError(Mat F,Mat A,const MatFactorInfo *info,Mat_MUMPS *mumps)
1225: {
1229: if (mumps->id.INFOG(1) < 0) {
1230: if (A->erroriffailure) {
1231: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
1232: } else {
1233: if (mumps->id.INFOG(1) == -6) {
1234: PetscInfo2(F,"matrix is singular in structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1235: F->factorerrortype = MAT_FACTOR_STRUCT_ZEROPIVOT;
1236: } else if (mumps->id.INFOG(1) == -5 || mumps->id.INFOG(1) == -7) {
1237: PetscInfo2(F,"problem with workspace, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1238: F->factorerrortype = MAT_FACTOR_OUTMEMORY;
1239: } else {
1240: PetscInfo2(F,"Error reported by MUMPS in analysis phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1241: F->factorerrortype = MAT_FACTOR_OTHER;
1242: }
1243: }
1244: }
1245: return(0);
1246: }
1248: /* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
1249: PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1250: {
1251: Mat_MUMPS *mumps = (Mat_MUMPS*)F->data;
1253: Vec b;
1254: IS is_iden;
1255: const PetscInt M = A->rmap->N;
1258: mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1260: /* Set MUMPS options from the options database */
1261: PetscSetMUMPSFromOptions(F,A);
1263: (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1265: /* analysis phase */
1266: /*----------------*/
1267: mumps->id.job = JOB_FACTSYMBOLIC;
1268: mumps->id.n = M;
1269: switch (mumps->id.ICNTL(18)) {
1270: case 0: /* centralized assembled matrix input */
1271: if (!mumps->myid) {
1272: mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1273: if (mumps->id.ICNTL(6)>1) {
1274: mumps->id.a = (MumpsScalar*)mumps->val;
1275: }
1276: if (mumps->id.ICNTL(7) == 1) { /* use user-provided matrix ordering - assuming r = c ordering */
1277: /*
1278: PetscBool flag;
1279: ISEqual(r,c,&flag);
1280: if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
1281: ISView(r,PETSC_VIEWER_STDOUT_SELF);
1282: */
1283: if (!mumps->myid) {
1284: const PetscInt *idx;
1285: PetscInt i,*perm_in;
1287: PetscMalloc1(M,&perm_in);
1288: ISGetIndices(r,&idx);
1290: mumps->id.perm_in = perm_in;
1291: for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
1292: ISRestoreIndices(r,&idx);
1293: }
1294: }
1295: }
1296: break;
1297: case 3: /* distributed assembled matrix input (size>1) */
1298: mumps->id.nz_loc = mumps->nz;
1299: mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1300: if (mumps->id.ICNTL(6)>1) {
1301: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1302: }
1303: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1304: if (!mumps->myid) {
1305: VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);
1306: ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);
1307: } else {
1308: VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);
1309: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
1310: }
1311: MatCreateVecs(A,NULL,&b);
1312: VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);
1313: ISDestroy(&is_iden);
1314: VecDestroy(&b);
1315: break;
1316: }
1317: PetscMUMPS_c(&mumps->id);
1318: MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);
1320: F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1321: F->ops->solve = MatSolve_MUMPS;
1322: F->ops->solvetranspose = MatSolveTranspose_MUMPS;
1323: F->ops->matsolve = MatMatSolve_MUMPS;
1324: return(0);
1325: }
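/* Illustrative call sequence that reaches the routine above (a sketch, not
   part of this file; error checking omitted). ExampleLUSolveWithMUMPS is a
   hypothetical helper; the natural ordering is only a placeholder, since the
   permutations are used solely when ICNTL(7)==1. */
#if 0
static PetscErrorCode ExampleLUSolveWithMUMPS(Mat A,Vec b,Vec x)
{
  Mat           F;
  IS            rperm,cperm;
  MatFactorInfo info;

  MatFactorInfoInitialize(&info);
  MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);
  MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);   /* selects this interface */
  MatLUFactorSymbolic(F,A,rperm,cperm,&info);        /* analysis phase: JOB_FACTSYMBOLIC */
  MatLUFactorNumeric(F,A,&info);                     /* numeric phase: JOB_FACTNUMERIC */
  MatSolve(F,b,x);                                   /* solve phase: JOB_SOLVE */
  ISDestroy(&rperm);
  ISDestroy(&cperm);
  MatDestroy(&F);
  return 0;
}
#endif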
1327: /* Note the Petsc r and c permutations are ignored */
1328: PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1329: {
1330: Mat_MUMPS *mumps = (Mat_MUMPS*)F->data;
1332: Vec b;
1333: IS is_iden;
1334: const PetscInt M = A->rmap->N;
1337: mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1339: /* Set MUMPS options from the options database */
1340: PetscSetMUMPSFromOptions(F,A);
1342: (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1344: /* analysis phase */
1345: /*----------------*/
1346: mumps->id.job = JOB_FACTSYMBOLIC;
1347: mumps->id.n = M;
1348: switch (mumps->id.ICNTL(18)) {
1349: case 0: /* centralized assembled matrix input */
1350: if (!mumps->myid) {
1351: mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1352: if (mumps->id.ICNTL(6)>1) {
1353: mumps->id.a = (MumpsScalar*)mumps->val;
1354: }
1355: }
1356: break;
1357: case 3: /* distributed assembled matrix input (size>1) */
1358: mumps->id.nz_loc = mumps->nz;
1359: mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1360: if (mumps->id.ICNTL(6)>1) {
1361: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1362: }
1363: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1364: if (!mumps->myid) {
1365: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);
1366: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
1367: } else {
1368: VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);
1369: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
1370: }
1371: MatCreateVecs(A,NULL,&b);
1372: VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);
1373: ISDestroy(&is_iden);
1374: VecDestroy(&b);
1375: break;
1376: }
1377: PetscMUMPS_c(&mumps->id);
1378: MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);
1380: F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1381: F->ops->solve = MatSolve_MUMPS;
1382: F->ops->solvetranspose = MatSolveTranspose_MUMPS;
1383: return(0);
1384: }
1386: /* Note the Petsc r permutation and factor info are ignored */
1387: PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
1388: {
1389: Mat_MUMPS *mumps = (Mat_MUMPS*)F->data;
1391: Vec b;
1392: IS is_iden;
1393: const PetscInt M = A->rmap->N;
1396: mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1398: /* Set MUMPS options from the options database */
1399: PetscSetMUMPSFromOptions(F,A);
1401: (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1403: /* analysis phase */
1404: /*----------------*/
1405: mumps->id.job = JOB_FACTSYMBOLIC;
1406: mumps->id.n = M;
1407: switch (mumps->id.ICNTL(18)) {
1408: case 0: /* centralized assembled matrix input */
1409: if (!mumps->myid) {
1410: mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1411: if (mumps->id.ICNTL(6)>1) {
1412: mumps->id.a = (MumpsScalar*)mumps->val;
1413: }
1414: }
1415: break;
1416: case 3: /* distributed assembled matrix input (size>1) */
1417: mumps->id.nz_loc = mumps->nz;
1418: mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1419: if (mumps->id.ICNTL(6)>1) {
1420: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1421: }
1422: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1423: if (!mumps->myid) {
1424: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);
1425: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
1426: } else {
1427: VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);
1428: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
1429: }
1430: MatCreateVecs(A,NULL,&b);
1431: VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);
1432: ISDestroy(&is_iden);
1433: VecDestroy(&b);
1434: break;
1435: }
1436: PetscMUMPS_c(&mumps->id);
1437: MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);
1439: F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
1440: F->ops->solve = MatSolve_MUMPS;
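  /* the Cholesky factor is of a symmetric matrix (A^T == A), so the transpose
     solve below can reuse MatSolve_MUMPS directly */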
1441: F->ops->solvetranspose = MatSolve_MUMPS;
1442: F->ops->matsolve = MatMatSolve_MUMPS;
1443: #if defined(PETSC_USE_COMPLEX)
1444: F->ops->getinertia = NULL;
1445: #else
1446: F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
1447: #endif
1448: return(0);
1449: }
1451: PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1452: {
1453: PetscErrorCode ierr;
1454: PetscBool iascii;
1455: PetscViewerFormat format;
1456: Mat_MUMPS *mumps=(Mat_MUMPS*)A->data;
1459: /* check if matrix is mumps type */
1460: if (A->ops->solve != MatSolve_MUMPS) return(0);
1462: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1463: if (iascii) {
1464: PetscViewerGetFormat(viewer,&format);
1465: if (format == PETSC_VIEWER_ASCII_INFO) {
1466: PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");
1467: PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",mumps->id.sym);
1468: PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",mumps->id.par);
1469: PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",mumps->id.ICNTL(1));
1470: PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));
1471: PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",mumps->id.ICNTL(3));
1472: PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",mumps->id.ICNTL(4));
1473: PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",mumps->id.ICNTL(5));
1474: PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",mumps->id.ICNTL(6));
1475: PetscViewerASCIIPrintf(viewer," ICNTL(7) (sequential matrix ordering):%d \n",mumps->id.ICNTL(7));
1476: PetscViewerASCIIPrintf(viewer," ICNTL(8) (scaling strategy): %d \n",mumps->id.ICNTL(8));
1477: PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",mumps->id.ICNTL(10));
1478: PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",mumps->id.ICNTL(11));
1479: if (mumps->id.ICNTL(11)>0) {
1480: PetscViewerASCIIPrintf(viewer," RINFOG(4) (inf norm of input mat): %g\n",mumps->id.RINFOG(4));
1481: PetscViewerASCIIPrintf(viewer," RINFOG(5) (inf norm of solution): %g\n",mumps->id.RINFOG(5));
1482: PetscViewerASCIIPrintf(viewer," RINFOG(6) (inf norm of residual): %g\n",mumps->id.RINFOG(6));
1483: PetscViewerASCIIPrintf(viewer," RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));
1484: PetscViewerASCIIPrintf(viewer," RINFOG(9) (error estimate): %g \n",mumps->id.RINFOG(9));
1485: PetscViewerASCIIPrintf(viewer," RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));
1486: }
1487: PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",mumps->id.ICNTL(12));
1488: PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",mumps->id.ICNTL(13));
1489: PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));
1490: /* ICNTL(15-17) not used */
1491: PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",mumps->id.ICNTL(18));
1492: PetscViewerASCIIPrintf(viewer," ICNTL(19) (Schur complement info): %d \n",mumps->id.ICNTL(19));
1493: PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",mumps->id.ICNTL(20));
1494: PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",mumps->id.ICNTL(21));
1495: PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",mumps->id.ICNTL(22));
1496: PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory that can be allocated locally):%d \n",mumps->id.ICNTL(23));
1498: PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d \n",mumps->id.ICNTL(24));
1499: PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",mumps->id.ICNTL(25));
1500: PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",mumps->id.ICNTL(26));
1501: PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",mumps->id.ICNTL(27));
1502: PetscViewerASCIIPrintf(viewer," ICNTL(28) (use parallel or sequential ordering): %d \n",mumps->id.ICNTL(28));
1503: PetscViewerASCIIPrintf(viewer," ICNTL(29) (parallel ordering): %d \n",mumps->id.ICNTL(29));
1505: PetscViewerASCIIPrintf(viewer," ICNTL(30) (user-specified set of entries in inv(A)): %d \n",mumps->id.ICNTL(30));
1506: PetscViewerASCIIPrintf(viewer," ICNTL(31) (which factors may be discarded during factorization): %d \n",mumps->id.ICNTL(31));
1507: PetscViewerASCIIPrintf(viewer," ICNTL(33) (compute determinant): %d \n",mumps->id.ICNTL(33));
1509: PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",mumps->id.CNTL(1));
1510: PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));
1511: PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",mumps->id.CNTL(3));
1512: PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",mumps->id.CNTL(4));
1513: PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g \n",mumps->id.CNTL(5));
1515: /* information local to each processor */
1516: PetscViewerASCIIPrintf(viewer, " RINFO(1) (local estimated flops for the elimination after analysis): \n");
1517: PetscViewerASCIIPushSynchronized(viewer);
1518: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(1));
1519: PetscViewerFlush(viewer);
1520: PetscViewerASCIIPrintf(viewer, " RINFO(2) (local estimated flops for the assembly after factorization): \n");
1521: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(2));
1522: PetscViewerFlush(viewer);
1523: PetscViewerASCIIPrintf(viewer, " RINFO(3) (local estimated flops for the elimination after factorization): \n");
1524: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(3));
1525: PetscViewerFlush(viewer);
1527: PetscViewerASCIIPrintf(viewer, " INFO(15) (estimated size (in MB) of MUMPS internal data for running numerical factorization): \n");
1528: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(15));
1529: PetscViewerFlush(viewer);
1531: PetscViewerASCIIPrintf(viewer, " INFO(16) (size (in MB) of MUMPS internal data used during numerical factorization): \n");
1532: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(16));
1533: PetscViewerFlush(viewer);
1535: PetscViewerASCIIPrintf(viewer, " INFO(23) (number of pivots eliminated on this processor after factorization): \n");
1536: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(23));
1537: PetscViewerFlush(viewer);
1539: if (mumps->ninfo && mumps->ninfo <= 40) {
1540: PetscInt i;
1541: for (i=0; i<mumps->ninfo; i++) {
1542: PetscViewerASCIIPrintf(viewer, " INFO(%D): \n",mumps->info[i]);
1543: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));
1544: PetscViewerFlush(viewer);
1545: }
1546: }
1549: PetscViewerASCIIPopSynchronized(viewer);
1551: if (!mumps->myid) { /* information from the host */
1552: PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));
1553: PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));
1554: PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));
1555: PetscViewerASCIIPrintf(viewer," (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));
1557: PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));
1558: PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));
1559: PetscViewerASCIIPrintf(viewer," INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));
1560: PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));
1561: PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively used after analysis): %d \n",mumps->id.INFOG(7));
1562: PetscViewerASCIIPrintf(viewer," INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));
1563: PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));
1564: PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space to store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));
1565: PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));
1566: PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));
1567: PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));
1568: PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compresses after factorization): %d \n",mumps->id.INFOG(14));
1569: PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));
1570: PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));
1571: PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));
1572: PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));
1573: PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));
1574: PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));
1575: PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));
1576: PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));
1577: PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));
1578: PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));
1579: PetscViewerASCIIPrintf(viewer," INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));
1580: PetscViewerASCIIPrintf(viewer," INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));
1581: PetscViewerASCIIPrintf(viewer," INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));
1582: PetscViewerASCIIPrintf(viewer," INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));
1583: PetscViewerASCIIPrintf(viewer," INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));
1584: PetscViewerASCIIPrintf(viewer," INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));
1585: PetscViewerASCIIPrintf(viewer," INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));
1586: }
1587: }
1588: }
1589: return(0);
1590: }
1592: PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1593: {
1594: Mat_MUMPS *mumps =(Mat_MUMPS*)A->data;
1597: info->block_size = 1.0;
1598: info->nz_allocated = mumps->id.INFOG(20);
1599: info->nz_used = mumps->id.INFOG(20);
1600: info->nz_unneeded = 0.0;
1601: info->assemblies = 0.0;
1602: info->mallocs = 0.0;
1603: info->memory = 0.0;
1604: info->fill_ratio_given = 0;
1605: info->fill_ratio_needed = 0;
1606: info->factor_mallocs = 0;
1607: return(0);
1608: }
1610: /* -------------------------------------------------------------------------------------------*/
1611: PetscErrorCode MatFactorSetSchurIS_MUMPS(Mat F, IS is)
1612: {
1613: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1614: const PetscInt *idxs;
1615: PetscInt size,i;
1619: ISGetLocalSize(is,&size);
1620: if (mumps->size > 1) {
1621: PetscBool ls,gs;
1623: ls = mumps->myid ? (size ? PETSC_FALSE : PETSC_TRUE) : PETSC_TRUE;
1624: MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,mumps->comm_mumps);
1625: if (!gs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS distributed parallel Schur complements not yet supported from PETSc");
1626: }
1627: if (mumps->id.size_schur != size) {
1628: PetscFree2(mumps->id.listvar_schur,mumps->id.schur);
1629: mumps->id.size_schur = size;
1630: mumps->id.schur_lld = size;
1631: PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);
1632: }
1634: /* Schur complement matrix */
1635: MatCreateSeqDense(PETSC_COMM_SELF,mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&F->schur);
1636: if (mumps->sym == 1) {
1637: MatSetOption(F->schur,MAT_SPD,PETSC_TRUE);
1638: }
1640: /* MUMPS expects Fortran style indices */
1641: ISGetIndices(is,&idxs);
1642: PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));
1643: for (i=0;i<size;i++) mumps->id.listvar_schur[i]++;
1644: ISRestoreIndices(is,&idxs);
1645: if (mumps->size > 1) {
1646: mumps->id.ICNTL(19) = 1; /* MUMPS returns Schur centralized on the host */
1647: } else {
1648: if (F->factortype == MAT_FACTOR_LU) {
1649: mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
1650: } else {
1651: mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
1652: }
1653: }
1654: /* set a special value of ICNTL (not handled by MUMPS) to be used in the solve phase by PETSc */
1655: mumps->id.ICNTL(26) = -1;
1656: return(0);
1657: }
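/*
   Caller-side sketch (illustrative; nschur and idx are hypothetical names for the number of
   Schur variables and their 0-based indices): this routine is reached through the generic
   PETSc interface, e.g.

     IS is;
     ISCreateGeneral(PETSC_COMM_SELF,nschur,idx,PETSC_COPY_VALUES,&is);
     MatFactorSetSchurIS(F,is);
     ISDestroy(&is);

   The shift to the 1-based (Fortran) indices MUMPS expects is applied above.
*/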
1659: /* -------------------------------------------------------------------------------------------*/
1660: PetscErrorCode MatFactorCreateSchurComplement_MUMPS(Mat F,Mat* S)
1661: {
1662: Mat St;
1663: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1664: PetscScalar *array;
1665: #if defined(PETSC_USE_COMPLEX)
1666: PetscScalar im = PetscSqrtScalar((PetscScalar)-1.0);
1667: #endif
1671: if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
1672: MatCreate(PETSC_COMM_SELF,&St);
1673: MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);
1674: MatSetType(St,MATDENSE);
1675: MatSetUp(St);
1676: MatDenseGetArray(St,&array);
1677: if (!mumps->sym) { /* MUMPS always returns a full matrix */
1678: if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
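/* transpose on copy: MUMPS returned the Schur complement row-major, while PETSc dense storage is column-major */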
1679: PetscInt i,j,N=mumps->id.size_schur;
1680: for (i=0;i<N;i++) {
1681: for (j=0;j<N;j++) {
1682: #if !defined(PETSC_USE_COMPLEX)
1683: PetscScalar val = mumps->id.schur[i*N+j];
1684: #else
1685: PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1686: #endif
1687: array[j*N+i] = val;
1688: }
1689: }
1690: } else { /* stored by columns */
1691: PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));
1692: }
1693: } else { /* either full or lower-triangular (not packed) */
1694: if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
1695: PetscInt i,j,N=mumps->id.size_schur;
1696: for (i=0;i<N;i++) {
1697: for (j=i;j<N;j++) {
1698: #if !defined(PETSC_USE_COMPLEX)
1699: PetscScalar val = mumps->id.schur[i*N+j];
1700: #else
1701: PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1702: #endif
1703: array[i*N+j] = val;
1704: array[j*N+i] = val;
1705: }
1706: }
1707: } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
1708: PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));
1709: } else { /* ICNTL(19) == 1 lower triangular stored by rows */
1710: PetscInt i,j,N=mumps->id.size_schur;
1711: for (i=0;i<N;i++) {
1712: for (j=0;j<i+1;j++) {
1713: #if !defined(PETSC_USE_COMPLEX)
1714: PetscScalar val = mumps->id.schur[i*N+j];
1715: #else
1716: PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1717: #endif
1718: array[i*N+j] = val;
1719: array[j*N+i] = val;
1720: }
1721: }
1722: }
1723: }
1724: MatDenseRestoreArray(St,&array);
1725: *S = St;
1726: return(0);
1727: }
1729: /* -------------------------------------------------------------------------------------------*/
1730: PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1731: {
1732: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1735: mumps->id.ICNTL(icntl) = ival;
1736: return(0);
1737: }
1739: PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
1740: {
1741: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1744: *ival = mumps->id.ICNTL(icntl);
1745: return(0);
1746: }
1748: /*@
1749: MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
1751: Logically Collective on Mat
1753: Input Parameters:
1754: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1755: . icntl - index of MUMPS parameter array ICNTL()
1756: - ival - value of MUMPS ICNTL(icntl)
1758: Options Database:
1759: . -mat_mumps_icntl_<icntl> <ival>
1761: Level: beginner
1763: References:
1764: . MUMPS Users' Guide
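   Example of use (a sketch; the indices and values are illustrative, see the MUMPS Users' Guide for valid entries), selecting parallel analysis with the PT-Scotch ordering:
$     MatMumpsSetIcntl(F,28,2);
$     MatMumpsSetIcntl(F,29,1);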
1766: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1767: @*/
1768: PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
1769: {
1771:
1774: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
1777: PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));
1778: return(0);
1779: }
1781: /*@
1782: MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
1784: Logically Collective on Mat
1786: Input Parameters:
1787: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1788: - icntl - index of MUMPS parameter array ICNTL()
1790: Output Parameter:
1791: . ival - value of MUMPS ICNTL(icntl)
1793: Level: beginner
1795: References:
1796: . MUMPS Users' Guide
1798: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1799: @*/
1800: PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
1801: {
1806: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
1809: PetscUseMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));
1810: return(0);
1811: }
1813: /* -------------------------------------------------------------------------------------------*/
1814: PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
1815: {
1816: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1819: mumps->id.CNTL(icntl) = val;
1820: return(0);
1821: }
1823: PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
1824: {
1825: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1828: *val = mumps->id.CNTL(icntl);
1829: return(0);
1830: }
1832: /*@
1833: MatMumpsSetCntl - Set MUMPS parameter CNTL()
1835: Logically Collective on Mat
1837: Input Parameters:
1838: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1839: . icntl - index of MUMPS parameter array CNTL()
1840: - val - value of MUMPS CNTL(icntl)
1842: Options Database:
1843: . -mat_mumps_cntl_<icntl> <val>
1845: Level: beginner
1847: References:
1848: . MUMPS Users' Guide
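   Example of use (a sketch; the value is illustrative), tightening the relative pivoting threshold CNTL(1):
$     MatMumpsSetCntl(F,1,0.1);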
1850: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1851: @*/
1852: PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
1853: {
1858: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
1861: PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));
1862: return(0);
1863: }
1865: /*@
1866: MatMumpsGetCntl - Get MUMPS parameter CNTL()
1868: Logically Collective on Mat
1870: Input Parameters:
1871: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1872: - icntl - index of MUMPS parameter array CNTL()
1874: Output Parameter:
1875: . val - value of MUMPS CNTL(icntl)
1877: Level: beginner
1879: References:
1880: . MUMPS Users' Guide
1882: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1883: @*/
1884: PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
1885: {
1890: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
1893: PetscUseMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));
1894: return(0);
1895: }
1897: PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
1898: {
1899: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1902: *info = mumps->id.INFO(icntl);
1903: return(0);
1904: }
1906: PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
1907: {
1908: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1911: *infog = mumps->id.INFOG(icntl);
1912: return(0);
1913: }
1915: PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
1916: {
1917: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1920: *rinfo = mumps->id.RINFO(icntl);
1921: return(0);
1922: }
1924: PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
1925: {
1926: Mat_MUMPS *mumps =(Mat_MUMPS*)F->data;
1929: *rinfog = mumps->id.RINFOG(icntl);
1930: return(0);
1931: }
1933: /*@
1934: MatMumpsGetInfo - Get MUMPS parameter INFO()
1936: Logically Collective on Mat
1938: Input Parameters:
1939: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1940: - icntl - index of MUMPS parameter array INFO()
1942: Output Parameter:
1943: . ival - value of MUMPS INFO(icntl)
1945: Level: beginner
1947: References:
1948: . MUMPS Users' Guide
1950: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1951: @*/
1952: PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
1953: {
1958: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
1960: PetscUseMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));
1961: return(0);
1962: }
1964: /*@
1965: MatMumpsGetInfog - Get MUMPS parameter INFOG()
1967: Logically Collective on Mat
1969: Input Parameters:
1970: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
1971: - icntl - index of MUMPS parameter array INFOG()
1973: Output Parameter:
1974: . ival - value of MUMPS INFOG(icntl)
1976: Level: beginner
1978: References:
1979: . MUMPS Users' Guide
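   Example of use (a sketch), checking the MUMPS global error flag INFOG(1) after a failed factorization:
$     PetscInt infog1;
$     MatMumpsGetInfog(F,1,&infog1);
$     if (infog1 < 0) PetscPrintf(PETSC_COMM_WORLD,"MUMPS failed with INFOG(1)=%D\n",infog1);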
1981: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
1982: @*/
1983: PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
1984: {
1989: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
1991: PetscUseMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));
1992: return(0);
1993: }
1995: /*@
1996: MatMumpsGetRinfo - Get MUMPS parameter RINFO()
1998: Logically Collective on Mat
2000: Input Parameters:
2001: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2002: - icntl - index of MUMPS parameter array RINFO()
2004: Output Parameter:
2005: . val - value of MUMPS RINFO(icntl)
2007: Level: beginner
2009: References:
2010: . MUMPS Users' Guide
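   Example of use (a sketch), reading this process's estimated elimination flops RINFO(1):
$     PetscReal flops;
$     MatMumpsGetRinfo(F,1,&flops);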
2012: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2013: @*/
2014: PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
2015: {
2020: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2022: PetscUseMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));
2023: return(0);
2024: }
2026: /*@
2027: MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
2029: Logically Collective on Mat
2031: Input Parameters:
2032: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2033: - icntl - index of MUMPS parameter array RINFOG()
2035: Output Parameter:
2036: . val - value of MUMPS RINFOG(icntl)
2038: Level: beginner
2040: References:
2041: . MUMPS Users' Guide
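   Example of use (a sketch), retrieving the determinant pieces computed when ICNTL(33) is set; the determinant is (RINFOG(12)+i*RINFOG(13))*2^INFOG(34):
$     PetscReal d12;
$     MatMumpsGetRinfog(F,12,&d12);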
2043: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2044: @*/
2045: PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
2046: {
2051: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2053: PetscUseMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));
2054: return(0);
2055: }
2057: /*MC
2058: MATSOLVERMUMPS - A matrix type providing direct solvers (LU and Cholesky) for
2059: distributed and sequential matrices via the external package MUMPS.
2061: Works with MATAIJ, MATBAIJ (LU only), and MATSBAIJ (Cholesky only) matrices
2063: Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch to have PETSc installed with MUMPS
2065: Use -pc_type lu or -pc_type cholesky together with -pc_factor_mat_solver_package mumps to use this direct solver
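  An illustrative command line (the executable name myapp is hypothetical):
$    ./myapp -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package mumps -mat_mumps_icntl_7 3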
2067: Options Database Keys:
2068: + -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages
2069: . -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warnings
2070: . -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host
2071: . -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4)
2072: . -mat_mumps_icntl_6 - ICNTL(6): permutes the matrix to a zero-free diagonal and/or scales it (0 to 7)
2073: . -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis
2074: . -mat_mumps_icntl_8 - ICNTL(8): scaling strategy (-2 to 8 or 77)
2075: . -mat_mumps_icntl_10 - ICNTL(10): max num of refinements
2076: . -mat_mumps_icntl_11 - ICNTL(11): statistics related to an error analysis (via -ksp_view)
2077: . -mat_mumps_icntl_12 - ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)
2078: . -mat_mumps_icntl_13 - ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting
2079: . -mat_mumps_icntl_14 - ICNTL(14): percentage increase in the estimated working space
2080: . -mat_mumps_icntl_19 - ICNTL(19): computes the Schur complement
2081: . -mat_mumps_icntl_22 - ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)
2082: . -mat_mumps_icntl_23 - ICNTL(23): max size of the working memory (MB) that can be allocated per processor
2083: . -mat_mumps_icntl_24 - ICNTL(24): detection of null pivot rows (0 or 1)
2084: . -mat_mumps_icntl_25 - ICNTL(25): compute a solution of a deficient matrix and a null space basis
2085: . -mat_mumps_icntl_26 - ICNTL(26): drives the solution phase if a Schur complement matrix has been computed
2086: . -mat_mumps_icntl_28 - ICNTL(28): use 1 for sequential analysis and ICNTL(7) ordering, or 2 for parallel analysis and ICNTL(29) ordering
2087: . -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis
2088: . -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A)
2089: . -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization
2090: . -mat_mumps_icntl_33 - ICNTL(33): compute determinant
2091: . -mat_mumps_cntl_1 - CNTL(1): relative pivoting threshold
2092: . -mat_mumps_cntl_2 - CNTL(2): stopping criterion of refinement
2093: . -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold
2094: . -mat_mumps_cntl_4 - CNTL(4): value for static pivoting
2095: - -mat_mumps_cntl_5 - CNTL(5): fixation for null pivots
2097: Level: beginner
2099: Notes: When a MUMPS factorization fails inside a KSP solve, for example with KSP_DIVERGED_PCSETUP_FAILED, one can retrieve the MUMPS information about the failure by calling
2100: $ KSPGetPC(ksp,&pc);
2101: $ PCFactorGetMatrix(pc,&mat);
2102: $ MatMumpsGetInfo(mat,....);
2103: $ MatMumpsGetInfog(mat,....); etc.
2104: Alternatively, run with -ksp_error_if_not_converged, in which case the program stops and the information is printed in the error message.
2106: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage, MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog(), KSPGetPC(), PCGetFactor(), PCFactorGetMatrix()
2108: M*/
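/*
   Library-call sketch equivalent to the command-line options above (illustrative; A, b, and x
   are assumed to be an assembled Mat and Vecs created elsewhere):

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPSetType(ksp,KSPPREONLY);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);
     PCFactorSetMatSolverPackage(pc,MATSOLVERMUMPS);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/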
2110: static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
2111: {
2113: *type = MATSOLVERMUMPS;
2114: return(0);
2115: }
2117: /* MatGetFactor for Seq and MPI AIJ matrices */
2118: static PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
2119: {
2120: Mat B;
2122: Mat_MUMPS *mumps;
2123: PetscBool isSeqAIJ;
2126: /* Create the factorization matrix */
2127: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
2128: MatCreate(PetscObjectComm((PetscObject)A),&B);
2129: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
2130: PetscStrallocpy("mumps",&((PetscObject)B)->type_name);
2131: MatSetUp(B);
2133: PetscNewLog(B,&mumps);
2135: B->ops->view = MatView_MUMPS;
2136: B->ops->getinfo = MatGetInfo_MUMPS;
2138: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);
2139: PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);
2140: PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);
2141: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);
2142: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);
2143: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);
2144: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);
2145: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);
2146: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);
2147: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);
2148: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);
2150: if (ftype == MAT_FACTOR_LU) {
2151: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
2152: B->factortype = MAT_FACTOR_LU;
2153: if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
2154: else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
2155: mumps->sym = 0;
2156: } else {
2157: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2158: B->factortype = MAT_FACTOR_CHOLESKY;
2159: if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
2160: else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
2161: #if defined(PETSC_USE_COMPLEX)
2162: mumps->sym = 2;
2163: #else
2164: if (A->spd_set && A->spd) mumps->sym = 1;
2165: else mumps->sym = 2;
2166: #endif
2167: }
2169: /* set solvertype */
2170: PetscFree(B->solvertype);
2171: PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);
2173: mumps->isAIJ = PETSC_TRUE;
2174: B->ops->destroy = MatDestroy_MUMPS;
2175: B->data = (void*)mumps;
2177: PetscInitializeMUMPS(A,mumps);
2179: *F = B;
2180: return(0);
2181: }
2183: /* MatGetFactor for Seq and MPI SBAIJ matrices */
2184: static PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
2185: {
2186: Mat B;
2188: Mat_MUMPS *mumps;
2189: PetscBool isSeqSBAIJ;
2192: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
2193: if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
2194: PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
2195: /* Create the factorization matrix */
2196: MatCreate(PetscObjectComm((PetscObject)A),&B);
2197: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
2198: PetscStrallocpy("mumps",&((PetscObject)B)->type_name);
2199: MatSetUp(B);
2201: PetscNewLog(B,&mumps);
2202: if (isSeqSBAIJ) {
2203: mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
2204: } else {
2205: mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
2206: }
2208: B->ops->getinfo = MatGetInfo_External;
2209: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2210: B->ops->view = MatView_MUMPS;
2212: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);
2213: PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);
2214: PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);
2215: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);
2216: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);
2217: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);
2218: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);
2219: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);
2220: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);
2221: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);
2222: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);
2224: B->factortype = MAT_FACTOR_CHOLESKY;
2225: #if defined(PETSC_USE_COMPLEX)
2226: mumps->sym = 2;
2227: #else
2228: if (A->spd_set && A->spd) mumps->sym = 1;
2229: else mumps->sym = 2;
2230: #endif
2232: /* set solvertype */
2233: PetscFree(B->solvertype);
2234: PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);
2236: mumps->isAIJ = PETSC_FALSE;
2237: B->ops->destroy = MatDestroy_MUMPS;
2238: B->data = (void*)mumps;
2240: PetscInitializeMUMPS(A,mumps);
2242: *F = B;
2243: return(0);
2244: }
2246: static PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2247: {
2248: Mat B;
2250: Mat_MUMPS *mumps;
2251: PetscBool isSeqBAIJ;
2254: /* Create the factorization matrix */
2255: PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);
2256: MatCreate(PetscObjectComm((PetscObject)A),&B);
2257: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
2258: PetscStrallocpy("mumps",&((PetscObject)B)->type_name);
2259: MatSetUp(B);
2261: PetscNewLog(B,&mumps);
2262: if (ftype == MAT_FACTOR_LU) {
2263: B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2264: B->factortype = MAT_FACTOR_LU;
2265: if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2266: else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2267: mumps->sym = 0;
2268: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead");
2270: B->ops->getinfo = MatGetInfo_External;
2271: B->ops->view = MatView_MUMPS;
2273: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);
2274: PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);
2275: PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);
2276: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);
2277: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);
2278: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);
2279: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);
2280: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);
2281: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);
2282: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);
2283: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);
2285: /* set solvertype */
2286: PetscFree(B->solvertype);
2287: PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);
2289: mumps->isAIJ = PETSC_TRUE;
2290: B->ops->destroy = MatDestroy_MUMPS;
2291: B->data = (void*)mumps;
2293: PetscInitializeMUMPS(A,mumps);
2295: *F = B;
2296: return(0);
2297: }
2299: PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void)
2300: {
2304: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);
2305: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);
2306: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);
2307: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);
2308: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);
2309: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);
2310: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);
2311: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);
2312: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);
2313: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);
2314: return(0);
2315: }