Actual source code: mumps.c
petsc-3.7.3 2016-08-01
2: /*
3: Provides an interface to the MUMPS sparse solver
4: */
6: #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/
7: #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
8: #include <petscblaslapack.h>
10: EXTERN_C_BEGIN
11: #if defined(PETSC_USE_COMPLEX)
12: #if defined(PETSC_USE_REAL_SINGLE)
13: #include <cmumps_c.h>
14: #else
15: #include <zmumps_c.h>
16: #endif
17: #else
18: #if defined(PETSC_USE_REAL_SINGLE)
19: #include <smumps_c.h>
20: #else
21: #include <dmumps_c.h>
22: #endif
23: #endif
24: EXTERN_C_END
25: #define JOB_INIT -1
26: #define JOB_FACTSYMBOLIC 1
27: #define JOB_FACTNUMERIC 2
28: #define JOB_SOLVE 3
29: #define JOB_END -2
31: /* calls to MUMPS */
32: #if defined(PETSC_USE_COMPLEX)
33: #if defined(PETSC_USE_REAL_SINGLE)
34: #define PetscMUMPS_c cmumps_c
35: #else
36: #define PetscMUMPS_c zmumps_c
37: #endif
38: #else
39: #if defined(PETSC_USE_REAL_SINGLE)
40: #define PetscMUMPS_c smumps_c
41: #else
42: #define PetscMUMPS_c dmumps_c
43: #endif
44: #endif
46: /* declare MumpsScalar */
47: #if defined(PETSC_USE_COMPLEX)
48: #if defined(PETSC_USE_REAL_SINGLE)
49: #define MumpsScalar mumps_complex
50: #else
51: #define MumpsScalar mumps_double_complex
52: #endif
53: #else
54: #define MumpsScalar PetscScalar
55: #endif
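/*
   Note: each MumpsScalar choice above is assumed to be layout-compatible with
   the corresponding PetscScalar (e.g. mumps_double_complex vs. a complex
   PetscScalar); the (MumpsScalar*) / (PetscScalar*) casts used throughout this
   file rely on that assumption.
*/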
57: /* macros s.t. indices match MUMPS documentation */
58: #define ICNTL(I) icntl[(I)-1]
59: #define CNTL(I) cntl[(I)-1]
60: #define INFOG(I) infog[(I)-1]
61: #define INFO(I) info[(I)-1]
62: #define RINFOG(I) rinfog[(I)-1]
63: #define RINFO(I) rinfo[(I)-1]
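/*
   Example (illustrative): with the macros above, an assignment written against
   the 1-based numbering of the MUMPS documentation, e.g.

       mumps->id.ICNTL(7) = 2;

   expands to the 0-based C array access

       mumps->id.icntl[6] = 2;

   so entries can be copied from the MUMPS manual verbatim.
*/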
65: typedef struct {
66: #if defined(PETSC_USE_COMPLEX)
67: #if defined(PETSC_USE_REAL_SINGLE)
68: CMUMPS_STRUC_C id;
69: #else
70: ZMUMPS_STRUC_C id;
71: #endif
72: #else
73: #if defined(PETSC_USE_REAL_SINGLE)
74: SMUMPS_STRUC_C id;
75: #else
76: DMUMPS_STRUC_C id;
77: #endif
78: #endif
80: MatStructure matstruc;
81: PetscMPIInt myid,size;
82: PetscInt *irn,*jcn,nz,sym;
83: PetscScalar *val;
84: MPI_Comm comm_mumps;
85: PetscBool isAIJ;
86: PetscInt ICNTL9_pre; /* check if ICNTL(9) is changed from previous MatSolve */
87: VecScatter scat_rhs, scat_sol; /* used by MatSolve() */
88: Vec b_seq,x_seq;
89: PetscInt ninfo,*info; /* display INFO */
90: PetscInt sizeredrhs;
91:   PetscBLASInt *schur_pivots;
92:   PetscBLASInt schur_B_lwork;
93: PetscScalar *schur_work;
94: PetscScalar *schur_sol;
95: PetscInt schur_sizesol;
96: PetscBool schur_factored;
97: PetscBool schur_inverted;
99: PetscErrorCode (*Destroy)(Mat);
100: PetscErrorCode (*ConvertToTriples)(Mat, int, MatReuse, int*, int**, int**, PetscScalar**);
101: } Mat_MUMPS;
103: extern PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
107: static PetscErrorCode MatMumpsResetSchur_Private(Mat_MUMPS* mumps)
108: {
112: PetscFree2(mumps->id.listvar_schur,mumps->id.schur);
113: PetscFree(mumps->id.redrhs);
114: PetscFree(mumps->schur_sol);
115: PetscFree(mumps->schur_pivots);
116: PetscFree(mumps->schur_work);
117: mumps->id.size_schur = 0;
118: mumps->id.ICNTL(19) = 0;
119: return(0);
120: }
124: static PetscErrorCode MatMumpsFactorSchur_Private(Mat_MUMPS* mumps)
125: {
126: PetscBLASInt B_N,B_ierr,B_slda;
130: if (mumps->schur_factored) {
131: return(0);
132: }
133: PetscBLASIntCast(mumps->id.size_schur,&B_N);
134: PetscBLASIntCast(mumps->id.schur_lld,&B_slda);
135:   if (!mumps->sym) { /* MUMPS always returns a full Schur matrix */
136: if (!mumps->schur_pivots) {
137: PetscMalloc1(B_N,&mumps->schur_pivots);
138: }
139: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
140: PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&B_ierr));
141: PetscFPTrapPop();
142: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
143: } else { /* either full or lower-triangular (not packed) */
144: char ord[2];
145: if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
146: sprintf(ord,"L");
147: } else { /* ICNTL(19) == 1 lower triangular stored by rows */
148: sprintf(ord,"U");
149: }
150: if (mumps->id.sym == 2) {
151: if (!mumps->schur_pivots) {
152: PetscScalar lwork;
154: PetscMalloc1(B_N,&mumps->schur_pivots);
155: mumps->schur_B_lwork=-1;
156: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
157: PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
158: PetscFPTrapPop();
159: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to SYTRF Lapack routine %d",(int)B_ierr);
160: PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);
161: PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);
162: }
163: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
164: PetscStackCallBLAS("LAPACKsytrf",LAPACKsytrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
165: PetscFPTrapPop();
166: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRF Lapack routine %d",(int)B_ierr);
167: } else {
168: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
169: PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_slda,&B_ierr));
170: PetscFPTrapPop();
171: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
172: }
173: }
174: mumps->schur_factored = PETSC_TRUE;
175: return(0);
176: }
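/*
   Summary of the LAPACK kernels selected above for factoring the Schur complement:
     sym == 0              -> GETRF (LU with partial pivoting) on the full Schur matrix
     id.sym == 2           -> SYTRF (symmetric indefinite), preceded by an lwork = -1 workspace query
     otherwise (definite)  -> POTRF (Cholesky)
   The 'L'/'U' argument reflects whether MUMPS returned the triangle stored by
   columns (ICNTL(19) == 2 or 3) or by rows (ICNTL(19) == 1).
*/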
180: static PetscErrorCode MatMumpsInvertSchur_Private(Mat_MUMPS* mumps)
181: {
182: PetscBLASInt B_N,B_ierr,B_slda;
186: MatMumpsFactorSchur_Private(mumps);
187: PetscBLASIntCast(mumps->id.size_schur,&B_N);
188: PetscBLASIntCast(mumps->id.schur_lld,&B_slda);
189:   if (!mumps->sym) { /* MUMPS always returns a full Schur matrix */
190: if (!mumps->schur_work) {
191: PetscScalar lwork;
193: mumps->schur_B_lwork = -1;
194: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
195: PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,&lwork,&mumps->schur_B_lwork,&B_ierr));
196: PetscFPTrapPop();
197: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to GETRI Lapack routine %d",(int)B_ierr);
198: PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&mumps->schur_B_lwork);
199: PetscMalloc1(mumps->schur_B_lwork,&mumps->schur_work);
200: }
201: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
202: PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&mumps->schur_B_lwork,&B_ierr));
203: PetscFPTrapPop();
204: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
205: } else { /* either full or lower-triangular (not packed) */
206: char ord[2];
207: if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
208: sprintf(ord,"L");
209: } else { /* ICNTL(19) == 1 lower triangular stored by rows */
210: sprintf(ord,"U");
211: }
212: if (mumps->id.sym == 2) {
213: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
214: PetscStackCallBLAS("LAPACKsytri",LAPACKsytri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,mumps->schur_pivots,mumps->schur_work,&B_ierr));
215: PetscFPTrapPop();
216: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRI Lapack routine %d",(int)B_ierr);
217: } else {
218: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
219: PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_(ord,&B_N,(PetscScalar*)mumps->id.schur,&B_N,&B_ierr));
220: PetscFPTrapPop();
221: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
222: }
223: }
224: mumps->schur_inverted = PETSC_TRUE;
225: return(0);
226: }
230: static PetscErrorCode MatMumpsSolveSchur_Private(Mat_MUMPS* mumps, PetscBool sol_in_redrhs)
231: {
232: PetscBLASInt B_N,B_Nrhs,B_ierr,B_slda,B_rlda;
233: PetscScalar one=1.,zero=0.;
237: MatMumpsFactorSchur_Private(mumps);
238: PetscBLASIntCast(mumps->id.size_schur,&B_N);
239: PetscBLASIntCast(mumps->id.schur_lld,&B_slda);
240: PetscBLASIntCast(mumps->id.nrhs,&B_Nrhs);
241: PetscBLASIntCast(mumps->id.lredrhs,&B_rlda);
242: if (mumps->schur_inverted) {
243: PetscInt sizesol = B_Nrhs*B_N;
244: if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
245: PetscFree(mumps->schur_sol);
246: PetscMalloc1(sizesol,&mumps->schur_sol);
247: mumps->schur_sizesol = sizesol;
248: }
249: if (!mumps->sym) {
250: char type[2];
251: if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
252: if (!mumps->id.ICNTL(9)) { /* transpose solve */
253: sprintf(type,"N");
254: } else {
255: sprintf(type,"T");
256: }
257: } else { /* stored by columns */
258: if (!mumps->id.ICNTL(9)) { /* transpose solve */
259: sprintf(type,"T");
260: } else {
261: sprintf(type,"N");
262: }
263: }
264: PetscStackCallBLAS("BLASgemm",BLASgemm_(type,"N",&B_N,&B_Nrhs,&B_N,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
265: } else {
266: char ord[2];
267: if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
268: sprintf(ord,"L");
269: } else { /* ICNTL(19) == 1 lower triangular stored by rows */
270: sprintf(ord,"U");
271: }
272: PetscStackCallBLAS("BLASsymm",BLASsymm_("L",ord,&B_N,&B_Nrhs,&one,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&zero,mumps->schur_sol,&B_rlda));
273: }
274: if (sol_in_redrhs) {
275: PetscMemcpy(mumps->id.redrhs,mumps->schur_sol,sizesol*sizeof(PetscScalar));
276: }
277: } else { /* Schur complement has not been inverted */
278: MumpsScalar *orhs=NULL;
280: if (!sol_in_redrhs) {
281: PetscInt sizesol = B_Nrhs*B_N;
282: if (!mumps->schur_sol || sizesol > mumps->schur_sizesol) {
283: PetscFree(mumps->schur_sol);
284: PetscMalloc1(sizesol,&mumps->schur_sol);
285: mumps->schur_sizesol = sizesol;
286: }
287: orhs = mumps->id.redrhs;
288: PetscMemcpy(mumps->schur_sol,mumps->id.redrhs,sizesol*sizeof(PetscScalar));
289: mumps->id.redrhs = (MumpsScalar*)mumps->schur_sol;
290: }
291:     if (!mumps->sym) { /* MUMPS always returns a full Schur matrix */
292: char type[2];
293: if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
294: if (!mumps->id.ICNTL(9)) { /* transpose solve */
295: sprintf(type,"N");
296: } else {
297: sprintf(type,"T");
298: }
299: } else { /* stored by columns */
300: if (!mumps->id.ICNTL(9)) { /* transpose solve */
301: sprintf(type,"T");
302: } else {
303: sprintf(type,"N");
304: }
305: }
306: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
307: PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_(type,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
308: PetscFPTrapPop();
309: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRS Lapack routine %d",(int)B_ierr);
310: } else { /* either full or lower-triangular (not packed) */
311: char ord[2];
312: if (mumps->id.ICNTL(19) == 2 || mumps->id.ICNTL(19) == 3) { /* lower triangular stored by columns or full matrix */
313: sprintf(ord,"L");
314: } else { /* ICNTL(19) == 1 lower triangular stored by rows */
315: sprintf(ord,"U");
316: }
317: if (mumps->id.sym == 2) {
318: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
319: PetscStackCallBLAS("LAPACKsytrs",LAPACKsytrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,mumps->schur_pivots,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
320: PetscFPTrapPop();
321: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYTRS Lapack routine %d",(int)B_ierr);
322: } else {
323: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
324: PetscStackCallBLAS("LAPACKpotrs",LAPACKpotrs_(ord,&B_N,&B_Nrhs,(PetscScalar*)mumps->id.schur,&B_slda,(PetscScalar*)mumps->id.redrhs,&B_rlda,&B_ierr));
325: PetscFPTrapPop();
326: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRS Lapack routine %d",(int)B_ierr);
327: }
328: }
329: if (!sol_in_redrhs) {
330: mumps->id.redrhs = orhs;
331: }
332: }
333: return(0);
334: }
338: static PetscErrorCode MatMumpsHandleSchur_Private(Mat_MUMPS* mumps, PetscBool expansion)
339: {
343: if (!mumps->id.ICNTL(19)) { /* do nothing when Schur complement has not been computed */
344: return(0);
345: }
346: if (!expansion) { /* prepare for the condensation step */
347: PetscInt sizeredrhs = mumps->id.nrhs*mumps->id.size_schur;
348: /* allocate MUMPS internal array to store reduced right-hand sides */
349: if (!mumps->id.redrhs || sizeredrhs > mumps->sizeredrhs) {
350: PetscFree(mumps->id.redrhs);
351: mumps->id.lredrhs = mumps->id.size_schur;
352: PetscMalloc1(mumps->id.nrhs*mumps->id.lredrhs,&mumps->id.redrhs);
353: mumps->sizeredrhs = mumps->id.nrhs*mumps->id.lredrhs;
354: }
355: mumps->id.ICNTL(26) = 1; /* condensation phase */
356: } else { /* prepare for the expansion step */
357: /* solve Schur complement (this has to be done by the MUMPS user, so basically us) */
358: MatMumpsSolveSchur_Private(mumps,PETSC_TRUE);
359: mumps->id.ICNTL(26) = 2; /* expansion phase */
360: PetscMUMPS_c(&mumps->id);
361: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
362: /* restore defaults */
363: mumps->id.ICNTL(26) = -1;
364: }
365: return(0);
366: }
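/*
   Sketch of the condensation/expansion handled above. With the 2x2 block
   splitting induced by the user-supplied Schur indices,

       [ A11  A12 ] [ x1 ]   [ b1 ]
       [ A21  A22 ] [ x2 ] = [ b2 ],    S = A22 - A21*inv(A11)*A12,

   the ICNTL(26)=1 solve condenses the right-hand side into
   redrhs = b2 - A21*inv(A11)*b1, MatMumpsSolveSchur_Private() solves
   S*x2 = redrhs with the LAPACK factors of S, and the ICNTL(26)=2 solve
   expands the solution with x1 = inv(A11)*(b1 - A12*x2).
*/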
368: /*
369: MatConvertToTriples_A_B - convert Petsc matrix to triples: row[nz], col[nz], val[nz]
371: input:
372: A - matrix in aij,baij or sbaij (bs=1) format
373: shift - 0: C style output triple; 1: Fortran style output triple.
374: reuse - MAT_INITIAL_MATRIX: spaces are allocated and values are set for the triple
375: MAT_REUSE_MATRIX: only the values in v array are updated
376: output:
377: nnz - dim of r, c, and v (number of local nonzero entries of A)
378: r, c, v - row and col index, matrix values (matrix triples)
380:  The returned values r, c, and sometimes v are obtained in a single PetscMalloc(). Then in MatDestroy_MUMPS() they are
381:  freed with PetscFree(mumps->irn). This is not ideal code; the fact that v is ONLY sometimes part of mumps->irn means
382:  that the PetscMalloc() cannot easily be replaced with a PetscMalloc3().
384: */
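/*
   Example (illustrative): the 2x2 matrix

       [ 2  -1 ]
       [ 0   3 ]

   in AIJ format yields, with shift = 1 (Fortran-style indices as expected by
   MUMPS), the triples

       r = {1, 1, 2},  c = {1, 2, 2},  v = {2, -1, 3},  nnz = 3
*/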
388: PetscErrorCode MatConvertToTriples_seqaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
389: {
390: const PetscInt *ai,*aj,*ajj,M=A->rmap->n;
391: PetscInt nz,rnz,i,j;
393: PetscInt *row,*col;
394: Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data;
397: *v=aa->a;
398: if (reuse == MAT_INITIAL_MATRIX) {
399: nz = aa->nz;
400: ai = aa->i;
401: aj = aa->j;
402: *nnz = nz;
403: PetscMalloc1(2*nz, &row);
404: col = row + nz;
406: nz = 0;
407: for (i=0; i<M; i++) {
408: rnz = ai[i+1] - ai[i];
409: ajj = aj + ai[i];
410: for (j=0; j<rnz; j++) {
411: row[nz] = i+shift; col[nz++] = ajj[j] + shift;
412: }
413: }
414: *r = row; *c = col;
415: }
416: return(0);
417: }
421: PetscErrorCode MatConvertToTriples_seqbaij_seqaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
422: {
423: Mat_SeqBAIJ *aa=(Mat_SeqBAIJ*)A->data;
424: const PetscInt *ai,*aj,*ajj,bs2 = aa->bs2;
425: PetscInt bs,M,nz,idx=0,rnz,i,j,k,m;
427: PetscInt *row,*col;
430: MatGetBlockSize(A,&bs);
431: M = A->rmap->N/bs;
432: *v = aa->a;
433: if (reuse == MAT_INITIAL_MATRIX) {
434: ai = aa->i; aj = aa->j;
435: nz = bs2*aa->nz;
436: *nnz = nz;
437: PetscMalloc1(2*nz, &row);
438: col = row + nz;
440: for (i=0; i<M; i++) {
441: ajj = aj + ai[i];
442: rnz = ai[i+1] - ai[i];
443: for (k=0; k<rnz; k++) {
444: for (j=0; j<bs; j++) {
445: for (m=0; m<bs; m++) {
446: row[idx] = i*bs + m + shift;
447: col[idx++] = bs*(ajj[k]) + j + shift;
448: }
449: }
450: }
451: }
452: *r = row; *c = col;
453: }
454: return(0);
455: }
459: PetscErrorCode MatConvertToTriples_seqsbaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
460: {
461: const PetscInt *ai, *aj,*ajj,M=A->rmap->n;
462: PetscInt nz,rnz,i,j;
464: PetscInt *row,*col;
465: Mat_SeqSBAIJ *aa=(Mat_SeqSBAIJ*)A->data;
468: *v = aa->a;
469: if (reuse == MAT_INITIAL_MATRIX) {
470: nz = aa->nz;
471: ai = aa->i;
472: aj = aa->j;
474: *nnz = nz;
475: PetscMalloc1(2*nz, &row);
476: col = row + nz;
478: nz = 0;
479: for (i=0; i<M; i++) {
480: rnz = ai[i+1] - ai[i];
481: ajj = aj + ai[i];
482: for (j=0; j<rnz; j++) {
483: row[nz] = i+shift; col[nz++] = ajj[j] + shift;
484: }
485: }
486: *r = row; *c = col;
487: }
488: return(0);
489: }
493: PetscErrorCode MatConvertToTriples_seqaij_seqsbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
494: {
495: const PetscInt *ai,*aj,*ajj,*adiag,M=A->rmap->n;
496: PetscInt nz,rnz,i,j;
497: const PetscScalar *av,*v1;
498: PetscScalar *val;
499: PetscErrorCode ierr;
500: PetscInt *row,*col;
501: Mat_SeqAIJ *aa=(Mat_SeqAIJ*)A->data;
504: ai =aa->i; aj=aa->j;av=aa->a;
505: adiag=aa->diag;
506: if (reuse == MAT_INITIAL_MATRIX) {
507:     /* count nz in the upper triangular part of A */
508: nz = 0;
509: for (i=0; i<M; i++) nz += ai[i+1] - adiag[i];
510: *nnz = nz;
512: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
513: col = row + nz;
514: val = (PetscScalar*)(col + nz);
516: nz = 0;
517: for (i=0; i<M; i++) {
518: rnz = ai[i+1] - adiag[i];
519: ajj = aj + adiag[i];
520: v1 = av + adiag[i];
521: for (j=0; j<rnz; j++) {
522: row[nz] = i+shift; col[nz] = ajj[j] + shift; val[nz++] = v1[j];
523: }
524: }
525: *r = row; *c = col; *v = val;
526: } else {
527: nz = 0; val = *v;
528: for (i=0; i <M; i++) {
529: rnz = ai[i+1] - adiag[i];
530: ajj = aj + adiag[i];
531: v1 = av + adiag[i];
532: for (j=0; j<rnz; j++) {
533: val[nz++] = v1[j];
534: }
535: }
536: }
537: return(0);
538: }
542: PetscErrorCode MatConvertToTriples_mpisbaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
543: {
544: const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
545: PetscErrorCode ierr;
546: PetscInt rstart,nz,i,j,jj,irow,countA,countB;
547: PetscInt *row,*col;
548: const PetscScalar *av, *bv,*v1,*v2;
549: PetscScalar *val;
550: Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)A->data;
551: Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)(mat->A)->data;
552: Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data;
555: ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
556: av=aa->a; bv=bb->a;
558: garray = mat->garray;
560: if (reuse == MAT_INITIAL_MATRIX) {
561: nz = aa->nz + bb->nz;
562: *nnz = nz;
563: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
564: col = row + nz;
565: val = (PetscScalar*)(col + nz);
567: *r = row; *c = col; *v = val;
568: } else {
569: row = *r; col = *c; val = *v;
570: }
572: jj = 0; irow = rstart;
573: for (i=0; i<m; i++) {
574: ajj = aj + ai[i]; /* ptr to the beginning of this row */
575: countA = ai[i+1] - ai[i];
576: countB = bi[i+1] - bi[i];
577: bjj = bj + bi[i];
578: v1 = av + ai[i];
579: v2 = bv + bi[i];
581: /* A-part */
582: for (j=0; j<countA; j++) {
583: if (reuse == MAT_INITIAL_MATRIX) {
584: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
585: }
586: val[jj++] = v1[j];
587: }
589: /* B-part */
590: for (j=0; j < countB; j++) {
591: if (reuse == MAT_INITIAL_MATRIX) {
592: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
593: }
594: val[jj++] = v2[j];
595: }
596: irow++;
597: }
598: return(0);
599: }
603: PetscErrorCode MatConvertToTriples_mpiaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
604: {
605: const PetscInt *ai, *aj, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
606: PetscErrorCode ierr;
607: PetscInt rstart,nz,i,j,jj,irow,countA,countB;
608: PetscInt *row,*col;
609: const PetscScalar *av, *bv,*v1,*v2;
610: PetscScalar *val;
611: Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
612: Mat_SeqAIJ *aa = (Mat_SeqAIJ*)(mat->A)->data;
613: Mat_SeqAIJ *bb = (Mat_SeqAIJ*)(mat->B)->data;
616: ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
617: av=aa->a; bv=bb->a;
619: garray = mat->garray;
621: if (reuse == MAT_INITIAL_MATRIX) {
622: nz = aa->nz + bb->nz;
623: *nnz = nz;
624: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
625: col = row + nz;
626: val = (PetscScalar*)(col + nz);
628: *r = row; *c = col; *v = val;
629: } else {
630: row = *r; col = *c; val = *v;
631: }
633: jj = 0; irow = rstart;
634: for (i=0; i<m; i++) {
635: ajj = aj + ai[i]; /* ptr to the beginning of this row */
636: countA = ai[i+1] - ai[i];
637: countB = bi[i+1] - bi[i];
638: bjj = bj + bi[i];
639: v1 = av + ai[i];
640: v2 = bv + bi[i];
642: /* A-part */
643: for (j=0; j<countA; j++) {
644: if (reuse == MAT_INITIAL_MATRIX) {
645: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
646: }
647: val[jj++] = v1[j];
648: }
650: /* B-part */
651: for (j=0; j < countB; j++) {
652: if (reuse == MAT_INITIAL_MATRIX) {
653: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
654: }
655: val[jj++] = v2[j];
656: }
657: irow++;
658: }
659: return(0);
660: }
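/*
   Note on the A-part/B-part split used above: an MPIAIJ matrix stores, per
   process, a diagonal block mat->A (columns local to [rstart,rend)) and an
   off-diagonal block mat->B whose column indices are compressed; garray[] maps
   a compressed B column back to its global column, so both blocks can be
   emitted as global (row,col,val) triples in a single pass.
*/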
664: PetscErrorCode MatConvertToTriples_mpibaij_mpiaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
665: {
666: Mat_MPIBAIJ *mat = (Mat_MPIBAIJ*)A->data;
667: Mat_SeqBAIJ *aa = (Mat_SeqBAIJ*)(mat->A)->data;
668: Mat_SeqBAIJ *bb = (Mat_SeqBAIJ*)(mat->B)->data;
669: const PetscInt *ai = aa->i, *bi = bb->i, *aj = aa->j, *bj = bb->j,*ajj, *bjj;
670: const PetscInt *garray = mat->garray,mbs=mat->mbs,rstart=A->rmap->rstart;
671: const PetscInt bs2=mat->bs2;
672: PetscErrorCode ierr;
673: PetscInt bs,nz,i,j,k,n,jj,irow,countA,countB,idx;
674: PetscInt *row,*col;
675: const PetscScalar *av=aa->a, *bv=bb->a,*v1,*v2;
676: PetscScalar *val;
679: MatGetBlockSize(A,&bs);
680: if (reuse == MAT_INITIAL_MATRIX) {
681: nz = bs2*(aa->nz + bb->nz);
682: *nnz = nz;
683: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
684: col = row + nz;
685: val = (PetscScalar*)(col + nz);
687: *r = row; *c = col; *v = val;
688: } else {
689: row = *r; col = *c; val = *v;
690: }
692: jj = 0; irow = rstart;
693: for (i=0; i<mbs; i++) {
694: countA = ai[i+1] - ai[i];
695: countB = bi[i+1] - bi[i];
696: ajj = aj + ai[i];
697: bjj = bj + bi[i];
698: v1 = av + bs2*ai[i];
699: v2 = bv + bs2*bi[i];
701: idx = 0;
702: /* A-part */
703: for (k=0; k<countA; k++) {
704: for (j=0; j<bs; j++) {
705: for (n=0; n<bs; n++) {
706: if (reuse == MAT_INITIAL_MATRIX) {
707: row[jj] = irow + n + shift;
708: col[jj] = rstart + bs*ajj[k] + j + shift;
709: }
710: val[jj++] = v1[idx++];
711: }
712: }
713: }
715: idx = 0;
716: /* B-part */
717: for (k=0; k<countB; k++) {
718: for (j=0; j<bs; j++) {
719: for (n=0; n<bs; n++) {
720: if (reuse == MAT_INITIAL_MATRIX) {
721: row[jj] = irow + n + shift;
722: col[jj] = bs*garray[bjj[k]] + j + shift;
723: }
724: val[jj++] = v2[idx++];
725: }
726: }
727: }
728: irow += bs;
729: }
730: return(0);
731: }
735: PetscErrorCode MatConvertToTriples_mpiaij_mpisbaij(Mat A,int shift,MatReuse reuse,int *nnz,int **r, int **c, PetscScalar **v)
736: {
737: const PetscInt *ai, *aj,*adiag, *bi, *bj,*garray,m=A->rmap->n,*ajj,*bjj;
738: PetscErrorCode ierr;
739: PetscInt rstart,nz,nza,nzb,i,j,jj,irow,countA,countB;
740: PetscInt *row,*col;
741: const PetscScalar *av, *bv,*v1,*v2;
742: PetscScalar *val;
743: Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
744: Mat_SeqAIJ *aa =(Mat_SeqAIJ*)(mat->A)->data;
745: Mat_SeqAIJ *bb =(Mat_SeqAIJ*)(mat->B)->data;
748: ai=aa->i; aj=aa->j; adiag=aa->diag;
749: bi=bb->i; bj=bb->j; garray = mat->garray;
750: av=aa->a; bv=bb->a;
752: rstart = A->rmap->rstart;
754: if (reuse == MAT_INITIAL_MATRIX) {
755: nza = 0; /* num of upper triangular entries in mat->A, including diagonals */
756: nzb = 0; /* num of upper triangular entries in mat->B */
757: for (i=0; i<m; i++) {
758: nza += (ai[i+1] - adiag[i]);
759: countB = bi[i+1] - bi[i];
760: bjj = bj + bi[i];
761: for (j=0; j<countB; j++) {
762: if (garray[bjj[j]] > rstart) nzb++;
763: }
764: }
766: nz = nza + nzb; /* total nz of upper triangular part of mat */
767: *nnz = nz;
768: PetscMalloc((2*nz*sizeof(PetscInt)+nz*sizeof(PetscScalar)), &row);
769: col = row + nz;
770: val = (PetscScalar*)(col + nz);
772: *r = row; *c = col; *v = val;
773: } else {
774: row = *r; col = *c; val = *v;
775: }
777: jj = 0; irow = rstart;
778: for (i=0; i<m; i++) {
779: ajj = aj + adiag[i]; /* ptr to the beginning of the diagonal of this row */
780: v1 = av + adiag[i];
781: countA = ai[i+1] - adiag[i];
782: countB = bi[i+1] - bi[i];
783: bjj = bj + bi[i];
784: v2 = bv + bi[i];
786: /* A-part */
787: for (j=0; j<countA; j++) {
788: if (reuse == MAT_INITIAL_MATRIX) {
789: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
790: }
791: val[jj++] = v1[j];
792: }
794: /* B-part */
795: for (j=0; j < countB; j++) {
796: if (garray[bjj[j]] > rstart) {
797: if (reuse == MAT_INITIAL_MATRIX) {
798: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
799: }
800: val[jj++] = v2[j];
801: }
802: }
803: irow++;
804: }
805: return(0);
806: }
810: PetscErrorCode MatGetDiagonal_MUMPS(Mat A,Vec v)
811: {
813: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type: MUMPS factor");
814: return(0);
815: }
819: PetscErrorCode MatDestroy_MUMPS(Mat A)
820: {
821: Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
825: PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);
826: VecScatterDestroy(&mumps->scat_rhs);
827: VecScatterDestroy(&mumps->scat_sol);
828: VecDestroy(&mumps->b_seq);
829: VecDestroy(&mumps->x_seq);
830: PetscFree(mumps->id.perm_in);
831: PetscFree(mumps->irn);
832: PetscFree(mumps->info);
833: MatMumpsResetSchur_Private(mumps);
834: mumps->id.job = JOB_END;
835: PetscMUMPS_c(&mumps->id);
836: MPI_Comm_free(&mumps->comm_mumps);
837: if (mumps->Destroy) {
838: (mumps->Destroy)(A);
839: }
840: PetscFree(A->spptr);
842: /* clear composed functions */
843: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
844: PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);
845: PetscObjectComposeFunction((PetscObject)A,"MatFactorInvertSchurComplement_C",NULL);
846: PetscObjectComposeFunction((PetscObject)A,"MatFactorCreateSchurComplement_C",NULL);
847: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSchurComplement_C",NULL);
848: PetscObjectComposeFunction((PetscObject)A,"MatFactorSolveSchurComplement_C",NULL);
849: PetscObjectComposeFunction((PetscObject)A,"MatFactorSolveSchurComplementTranspose_C",NULL);
850: PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetIcntl_C",NULL);
851: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetIcntl_C",NULL);
852: PetscObjectComposeFunction((PetscObject)A,"MatMumpsSetCntl_C",NULL);
853: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetCntl_C",NULL);
854: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfo_C",NULL);
855: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetInfog_C",NULL);
856: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfo_C",NULL);
857: PetscObjectComposeFunction((PetscObject)A,"MatMumpsGetRinfog_C",NULL);
858: return(0);
859: }
863: PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
864: {
865: Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
866: PetscScalar *array;
867: Vec b_seq;
868: IS is_iden,is_petsc;
869: PetscErrorCode ierr;
870: PetscInt i;
871: PetscBool second_solve = PETSC_FALSE;
872: static PetscBool cite1 = PETSC_FALSE,cite2 = PETSC_FALSE;
875: PetscCitationsRegister("@article{MUMPS01,\n author = {P.~R. Amestoy and I.~S. Duff and J.-Y. L'Excellent and J. Koster},\n title = {A fully asynchronous multifrontal solver using distributed dynamic scheduling},\n journal = {SIAM Journal on Matrix Analysis and Applications},\n volume = {23},\n number = {1},\n pages = {15--41},\n year = {2001}\n}\n",&cite1);
876: PetscCitationsRegister("@article{MUMPS02,\n author = {P.~R. Amestoy and A. Guermouche and J.-Y. L'Excellent and S. Pralet},\n title = {Hybrid scheduling for the parallel solution of linear systems},\n journal = {Parallel Computing},\n volume = {32},\n number = {2},\n pages = {136--156},\n year = {2006}\n}\n",&cite2);
878: if (A->errortype) {
879: PetscInfo2(A,"MatSolve is called with singular matrix factor, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
880: VecSetInf(x);
881: return(0);
882: }
884: mumps->id.nrhs = 1;
885: b_seq = mumps->b_seq;
886: if (mumps->size > 1) {
887:     /* MUMPS only supports centralized rhs. Scatter b into a sequential rhs vector */
888: VecScatterBegin(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);
889: VecScatterEnd(mumps->scat_rhs,b,b_seq,INSERT_VALUES,SCATTER_FORWARD);
890: if (!mumps->myid) {VecGetArray(b_seq,&array);}
891: } else { /* size == 1 */
892: VecCopy(b,x);
893: VecGetArray(x,&array);
894: }
895: if (!mumps->myid) { /* define rhs on the host */
896: mumps->id.nrhs = 1;
897: mumps->id.rhs = (MumpsScalar*)array;
898: }
900: /*
901: handle condensation step of Schur complement (if any)
902: We set by default ICNTL(26) == -1 when Schur indices have been provided by the user.
903:      According to the MUMPS (5.0.0) manual, any value should be harmless during the factorization phase
904: Unless the user provides a valid value for ICNTL(26), MatSolve and MatMatSolve routines solve the full system.
905: This requires an extra call to PetscMUMPS_c and the computation of the factors for S
906: */
907: if (mumps->id.ICNTL(26) < 0 || mumps->id.ICNTL(26) > 2) {
908: second_solve = PETSC_TRUE;
909: MatMumpsHandleSchur_Private(mumps,PETSC_FALSE);
910: }
911: /* solve phase */
912: /*-------------*/
913: mumps->id.job = JOB_SOLVE;
914: PetscMUMPS_c(&mumps->id);
915: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
917: /* handle expansion step of Schur complement (if any) */
918: if (second_solve) {
919: MatMumpsHandleSchur_Private(mumps,PETSC_TRUE);
920: }
922: if (mumps->size > 1) { /* convert mumps distributed solution to petsc mpi x */
923: if (mumps->scat_sol && mumps->ICNTL9_pre != mumps->id.ICNTL(9)) {
924:       /* when id.ICNTL(9) changes, the contents of lsol_loc may change (though not its size, lsol_loc), so recreate scat_sol */
925: VecScatterDestroy(&mumps->scat_sol);
926: }
927: if (!mumps->scat_sol) { /* create scatter scat_sol */
928: ISCreateStride(PETSC_COMM_SELF,mumps->id.lsol_loc,0,1,&is_iden); /* from */
929: for (i=0; i<mumps->id.lsol_loc; i++) {
930: mumps->id.isol_loc[i] -= 1; /* change Fortran style to C style */
931: }
932: ISCreateGeneral(PETSC_COMM_SELF,mumps->id.lsol_loc,mumps->id.isol_loc,PETSC_COPY_VALUES,&is_petsc); /* to */
933: VecScatterCreate(mumps->x_seq,is_iden,x,is_petsc,&mumps->scat_sol);
934: ISDestroy(&is_iden);
935: ISDestroy(&is_petsc);
937: mumps->ICNTL9_pre = mumps->id.ICNTL(9); /* save current value of id.ICNTL(9) */
938: }
940: VecScatterBegin(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
941: VecScatterEnd(mumps->scat_sol,mumps->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
942: }
943: return(0);
944: }
948: PetscErrorCode MatSolveTranspose_MUMPS(Mat A,Vec b,Vec x)
949: {
950: Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
954: mumps->id.ICNTL(9) = 0;
955: MatSolve_MUMPS(A,b,x);
956: mumps->id.ICNTL(9) = 1;
957: return(0);
958: }
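/*
   ICNTL(9) selects the system solved in the solve phase: 1 (the MUMPS default)
   solves A x = b, while any other value solves A^T x = b. The routine above
   toggles it around a plain MatSolve_MUMPS() call and then restores the default.
*/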
962: PetscErrorCode MatMatSolve_MUMPS(Mat A,Mat B,Mat X)
963: {
965: PetscBool flg;
966: Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
967: PetscInt i,nrhs,M;
968: PetscScalar *array,*bray;
971: PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
972: if (!flg) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
973: PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
974: if (!flg) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
975: if (B->rmap->n != X->rmap->n) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONG,"Matrix B and X must have same row distribution");
977: MatGetSize(B,&M,&nrhs);
978: mumps->id.nrhs = nrhs;
979: mumps->id.lrhs = M;
981: if (mumps->size == 1) {
982: /* copy B to X */
983: MatDenseGetArray(B,&bray);
984: MatDenseGetArray(X,&array);
985: PetscMemcpy(array,bray,M*nrhs*sizeof(PetscScalar));
986: MatDenseRestoreArray(B,&bray);
987: mumps->id.rhs = (MumpsScalar*)array;
988: /* handle condensation step of Schur complement (if any) */
989: MatMumpsHandleSchur_Private(mumps,PETSC_FALSE);
991: /* solve phase */
992: /*-------------*/
993: mumps->id.job = JOB_SOLVE;
994: PetscMUMPS_c(&mumps->id);
995: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
997: /* handle expansion step of Schur complement (if any) */
998: MatMumpsHandleSchur_Private(mumps,PETSC_TRUE);
999: MatDenseRestoreArray(X,&array);
1000: } else { /*--------- parallel case --------*/
1001: PetscInt lsol_loc,nlsol_loc,*isol_loc,*idx,*iidx,*idxx,*isol_loc_save;
1002: MumpsScalar *sol_loc,*sol_loc_save;
1003: IS is_to,is_from;
1004: PetscInt k,proc,j,m;
1005: const PetscInt *rstart;
1006: Vec v_mpi,b_seq,x_seq;
1007: VecScatter scat_rhs,scat_sol;
1009: /* create x_seq to hold local solution */
1010:     isol_loc_save = mumps->id.isol_loc; /* save it for MatSolve() */
1011: sol_loc_save = mumps->id.sol_loc;
1013: lsol_loc = mumps->id.INFO(23);
1014: nlsol_loc = nrhs*lsol_loc; /* length of sol_loc */
1015: PetscMalloc2(nlsol_loc,&sol_loc,nlsol_loc,&isol_loc);
1016: mumps->id.sol_loc = (MumpsScalar*)sol_loc;
1017: mumps->id.isol_loc = isol_loc;
1019: VecCreateSeqWithArray(PETSC_COMM_SELF,1,nlsol_loc,(PetscScalar*)sol_loc,&x_seq);
1021: /* copy rhs matrix B into vector v_mpi */
1022: MatGetLocalSize(B,&m,NULL);
1023: MatDenseGetArray(B,&bray);
1024: VecCreateMPIWithArray(PetscObjectComm((PetscObject)B),1,nrhs*m,nrhs*M,(const PetscScalar*)bray,&v_mpi);
1025: MatDenseRestoreArray(B,&bray);
1027: /* scatter v_mpi to b_seq because MUMPS only supports centralized rhs */
1028: /* idx: maps from k-th index of v_mpi to (i,j)-th global entry of B;
1029:        iidx: inverse of idx, will be used by scattering x_seq -> X */
1030: PetscMalloc2(nrhs*M,&idx,nrhs*M,&iidx);
1031: MatGetOwnershipRanges(B,&rstart);
1032: k = 0;
1033: for (proc=0; proc<mumps->size; proc++){
1034: for (j=0; j<nrhs; j++){
1035: for (i=rstart[proc]; i<rstart[proc+1]; i++){
1036: iidx[j*M + i] = k;
1037: idx[k++] = j*M + i;
1038: }
1039: }
1040: }
1042: if (!mumps->myid) {
1043: VecCreateSeq(PETSC_COMM_SELF,nrhs*M,&b_seq);
1044: ISCreateGeneral(PETSC_COMM_SELF,nrhs*M,idx,PETSC_COPY_VALUES,&is_to);
1045: ISCreateStride(PETSC_COMM_SELF,nrhs*M,0,1,&is_from);
1046: } else {
1047: VecCreateSeq(PETSC_COMM_SELF,0,&b_seq);
1048: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_to);
1049: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_from);
1050: }
1051: VecScatterCreate(v_mpi,is_from,b_seq,is_to,&scat_rhs);
1052: VecScatterBegin(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);
1053: ISDestroy(&is_to);
1054: ISDestroy(&is_from);
1055: VecScatterEnd(scat_rhs,v_mpi,b_seq,INSERT_VALUES,SCATTER_FORWARD);
1057: if (!mumps->myid) { /* define rhs on the host */
1058: VecGetArray(b_seq,&bray);
1059: mumps->id.rhs = (MumpsScalar*)bray;
1060: VecRestoreArray(b_seq,&bray);
1061: }
1063: /* solve phase */
1064: /*-------------*/
1065: mumps->id.job = JOB_SOLVE;
1066: PetscMUMPS_c(&mumps->id);
1067: if (mumps->id.INFOG(1) < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
1069: /* scatter mumps distributed solution to petsc vector v_mpi, which shares local arrays with solution matrix X */
1070: MatDenseGetArray(X,&array);
1071: VecPlaceArray(v_mpi,array);
1072:
1073: /* create scatter scat_sol */
1074: PetscMalloc1(nlsol_loc,&idxx);
1075: ISCreateStride(PETSC_COMM_SELF,nlsol_loc,0,1,&is_from);
1076: for (i=0; i<lsol_loc; i++) {
1077: isol_loc[i] -= 1; /* change Fortran style to C style */
1078: idxx[i] = iidx[isol_loc[i]];
1079: for (j=1; j<nrhs; j++){
1080: idxx[j*lsol_loc+i] = iidx[isol_loc[i]+j*M];
1081: }
1082: }
1083: ISCreateGeneral(PETSC_COMM_SELF,nlsol_loc,idxx,PETSC_COPY_VALUES,&is_to);
1084: VecScatterCreate(x_seq,is_from,v_mpi,is_to,&scat_sol);
1085: VecScatterBegin(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);
1086: ISDestroy(&is_from);
1087: ISDestroy(&is_to);
1088: VecScatterEnd(scat_sol,x_seq,v_mpi,INSERT_VALUES,SCATTER_FORWARD);
1089: MatDenseRestoreArray(X,&array);
1091: /* free spaces */
1092: mumps->id.sol_loc = sol_loc_save;
1093: mumps->id.isol_loc = isol_loc_save;
1095: PetscFree2(sol_loc,isol_loc);
1096: PetscFree2(idx,iidx);
1097: PetscFree(idxx);
1098: VecDestroy(&x_seq);
1099: VecDestroy(&v_mpi);
1100: VecDestroy(&b_seq);
1101: VecScatterDestroy(&scat_rhs);
1102: VecScatterDestroy(&scat_sol);
1103: }
1104: return(0);
1105: }
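/*
   A minimal usage sketch of the factor/solve path implemented above, assuming
   an assembled AIJ matrix A and dense matrices B and X with matching row
   layout (error checking with CHKERRQ omitted for brevity):

       Mat           F;
       IS            rperm,cperm;
       MatFactorInfo info;

       MatFactorInfoInitialize(&info);
       MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);
       MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);
       MatLUFactorSymbolic(F,A,rperm,cperm,&info);   // analysis phase (JOB_FACTSYMBOLIC)
       MatLUFactorNumeric(F,A,&info);                // numeric phase  (JOB_FACTNUMERIC)
       MatMatSolve(F,B,X);                           // dispatches to MatMatSolve_MUMPS()
*/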
1107: #if !defined(PETSC_USE_COMPLEX)
1108: /*
1109: input:
1110: F: numeric factor
1111: output:
1112: nneg: total number of negative pivots
1113: nzero: 0
1114: npos: (global dimension of F) - nneg
1115: */
1119: PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
1120: {
1121: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1123: PetscMPIInt size;
1126: MPI_Comm_size(PetscObjectComm((PetscObject)F),&size);
1127: /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
1128:   if (size > 1 && mumps->id.ICNTL(13) != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set to 1 for correct global matrix inertia\n",mumps->id.ICNTL(13));
1130: if (nneg) *nneg = mumps->id.INFOG(12);
1131: if (nzero || npos) {
1132:     if (mumps->id.ICNTL(24) != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"-mat_mumps_icntl_24 must be set to 1 for null pivot row detection");
1133: if (nzero) *nzero = mumps->id.INFOG(28);
1134: if (npos) *npos = F->rmap->N - (mumps->id.INFOG(12) + mumps->id.INFOG(28));
1135: }
1136: return(0);
1137: }
1138: #endif /* !defined(PETSC_USE_COMPLEX) */
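/*
   Usage sketch (illustrative): MatGetInertia() reaches the routine above through
   F->ops->getinertia for a real symmetric MUMPS (Cholesky) factor:

       PetscInt nneg,nzero,npos;
       MatGetInertia(F,&nneg,&nzero,&npos);

   Per the checks above, run with -mat_mumps_icntl_13 1 in parallel, and with
   -mat_mumps_icntl_24 1 if nzero/npos are requested.
*/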
1142: PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
1143: {
1144: Mat_MUMPS *mumps =(Mat_MUMPS*)(F)->spptr;
1146: Mat F_diag;
1147: PetscBool isMPIAIJ;
1150: if (mumps->id.INFOG(1) < 0) {
1151: if (mumps->id.INFOG(1) == -6) {
1152: PetscInfo2(A,"MatFactorNumeric is called with singular matrix structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1153: }
1154: PetscInfo2(A,"MatFactorNumeric is called after analysis phase fails, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1155: return(0);
1156: }
1158: (*mumps->ConvertToTriples)(A, 1, MAT_REUSE_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1160: /* numerical factorization phase */
1161: /*-------------------------------*/
1162: mumps->id.job = JOB_FACTNUMERIC;
1163: if (!mumps->id.ICNTL(18)) { /* A is centralized */
1164: if (!mumps->myid) {
1165: mumps->id.a = (MumpsScalar*)mumps->val;
1166: }
1167: } else {
1168: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1169: }
1170: PetscMUMPS_c(&mumps->id);
1171: if (mumps->id.INFOG(1) < 0) {
1172: if (A->erroriffailure) {
1173: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1174: } else {
1175: if (mumps->id.INFOG(1) == -10) { /* numerically singular matrix */
1176: PetscInfo2(F,"matrix is numerically singular, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1177: F->errortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
1178: } else if (mumps->id.INFOG(1) == -13) {
1179: PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, cannot allocate required memory %d megabytes\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1180: F->errortype = MAT_FACTOR_OUTMEMORY;
1181: } else if (mumps->id.INFOG(1) == -8 || mumps->id.INFOG(1) == -9 || (-16 < mumps->id.INFOG(1) && mumps->id.INFOG(1) < -10) ) {
1182: PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d, problem with workarray \n",mumps->id.INFOG(1),mumps->id.INFO(2));
1183: F->errortype = MAT_FACTOR_OUTMEMORY;
1184: } else {
1185: PetscInfo2(F,"MUMPS in numerical factorization phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1186: F->errortype = MAT_FACTOR_OTHER;
1187: }
1188: }
1189: }
1190:   if (!mumps->myid && mumps->id.ICNTL(16) > 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB," mumps->id.ICNTL(16):=%d\n",mumps->id.ICNTL(16));
1192: (F)->assembled = PETSC_TRUE;
1193: mumps->matstruc = SAME_NONZERO_PATTERN;
1194: mumps->schur_factored = PETSC_FALSE;
1195: mumps->schur_inverted = PETSC_FALSE;
1197:   /* just to be sure that the ICNTL(19) value returned by a call to MatMumpsGetIcntl is always consistent */
1198: if (!mumps->sym && mumps->id.ICNTL(19) && mumps->id.ICNTL(19) != 1) mumps->id.ICNTL(19) = 3;
1200: if (mumps->size > 1) {
1201: PetscInt lsol_loc;
1202: PetscScalar *sol_loc;
1204: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
1205: if (isMPIAIJ) F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
1206: else F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
1207: F_diag->assembled = PETSC_TRUE;
1209: /* distributed solution; Create x_seq=sol_loc for repeated use */
1210: if (mumps->x_seq) {
1211: VecScatterDestroy(&mumps->scat_sol);
1212: PetscFree2(mumps->id.sol_loc,mumps->id.isol_loc);
1213: VecDestroy(&mumps->x_seq);
1214: }
1215: lsol_loc = mumps->id.INFO(23); /* length of sol_loc */
1216: PetscMalloc2(lsol_loc,&sol_loc,lsol_loc,&mumps->id.isol_loc);
1217: mumps->id.lsol_loc = lsol_loc;
1218: mumps->id.sol_loc = (MumpsScalar*)sol_loc;
1219: VecCreateSeqWithArray(PETSC_COMM_SELF,1,lsol_loc,sol_loc,&mumps->x_seq);
1220: }
1221: return(0);
1222: }
1224: /* Sets MUMPS options from the options database */
1227: PetscErrorCode PetscSetMUMPSFromOptions(Mat F, Mat A)
1228: {
1229: Mat_MUMPS *mumps = (Mat_MUMPS*)F->spptr;
1231: PetscInt icntl,info[40],i,ninfo=40;
1232: PetscBool flg;
1235: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MUMPS Options","Mat");
1236: PetscOptionsInt("-mat_mumps_icntl_1","ICNTL(1): output stream for error messages","None",mumps->id.ICNTL(1),&icntl,&flg);
1237: if (flg) mumps->id.ICNTL(1) = icntl;
1238: PetscOptionsInt("-mat_mumps_icntl_2","ICNTL(2): output stream for diagnostic printing, statistics, and warning","None",mumps->id.ICNTL(2),&icntl,&flg);
1239: if (flg) mumps->id.ICNTL(2) = icntl;
1240: PetscOptionsInt("-mat_mumps_icntl_3","ICNTL(3): output stream for global information, collected on the host","None",mumps->id.ICNTL(3),&icntl,&flg);
1241: if (flg) mumps->id.ICNTL(3) = icntl;
1243: PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",mumps->id.ICNTL(4),&icntl,&flg);
1244: if (flg) mumps->id.ICNTL(4) = icntl;
1245:   if (mumps->id.ICNTL(4) || PetscLogPrintInfo) mumps->id.ICNTL(3) = 6; /* restore MUMPS default id.ICNTL(3) = 6 */
1247: PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): permutes to a zero-free diagonal and/or scale the matrix (0 to 7)","None",mumps->id.ICNTL(6),&icntl,&flg);
1248: if (flg) mumps->id.ICNTL(6) = icntl;
1250: PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis","None",mumps->id.ICNTL(7),&icntl,&flg);
1251: if (flg) {
1252:     if (icntl == 1 && mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"pivot order to be set by the user in PERM_IN -- not supported by the PETSc/MUMPS interface\n");
1253: else mumps->id.ICNTL(7) = icntl;
1254: }
1256: PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 8 or 77)","None",mumps->id.ICNTL(8),&mumps->id.ICNTL(8),NULL);
1257: /* PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): computes the solution using A or A^T","None",mumps->id.ICNTL(9),&mumps->id.ICNTL(9),NULL); handled by MatSolveTranspose_MUMPS() */
1258: PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",mumps->id.ICNTL(10),&mumps->id.ICNTL(10),NULL);
1259: PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to an error analysis (via -ksp_view)","None",mumps->id.ICNTL(11),&mumps->id.ICNTL(11),NULL);
1260: PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)","None",mumps->id.ICNTL(12),&mumps->id.ICNTL(12),NULL);
1261: PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting","None",mumps->id.ICNTL(13),&mumps->id.ICNTL(13),NULL);
1262: PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage increase in the estimated working space","None",mumps->id.ICNTL(14),&mumps->id.ICNTL(14),NULL);
1263: PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): computes the Schur complement","None",mumps->id.ICNTL(19),&mumps->id.ICNTL(19),NULL);
1264: if (mumps->id.ICNTL(19) <= 0 || mumps->id.ICNTL(19) > 3) { /* reset any schur data (if any) */
1265: MatMumpsResetSchur_Private(mumps);
1266: }
1267: /* PetscOptionsInt("-mat_mumps_icntl_20","ICNTL(20): the format (dense or sparse) of the right-hand sides","None",mumps->id.ICNTL(20),&mumps->id.ICNTL(20),NULL); -- sparse rhs is not supported in PETSc API */
1268: /* PetscOptionsInt("-mat_mumps_icntl_21","ICNTL(21): the distribution (centralized or distributed) of the solution vectors","None",mumps->id.ICNTL(21),&mumps->id.ICNTL(21),NULL); we only use distributed solution vector */
1270: PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)","None",mumps->id.ICNTL(22),&mumps->id.ICNTL(22),NULL);
1271: PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): max size of the working memory (MB) that can allocate per processor","None",mumps->id.ICNTL(23),&mumps->id.ICNTL(23),NULL);
1272: PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",mumps->id.ICNTL(24),&mumps->id.ICNTL(24),NULL);
1273: if (mumps->id.ICNTL(24)) {
1274:     mumps->id.ICNTL(13) = 1; /* turn off ScaLAPACK to help with the correct detection of null pivots */
1275: }
1277: PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): compute a solution of a deficient matrix and a null space basis","None",mumps->id.ICNTL(25),&mumps->id.ICNTL(25),NULL);
1278: PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): drives the solution phase if a Schur complement matrix","None",mumps->id.ICNTL(26),&mumps->id.ICNTL(26),NULL);
1279: PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): the blocking size for multiple right-hand sides","None",mumps->id.ICNTL(27),&mumps->id.ICNTL(27),NULL);
1280: PetscOptionsInt("-mat_mumps_icntl_28","ICNTL(28): use 1 for sequential analysis and ictnl(7) ordering, or 2 for parallel analysis and ictnl(29) ordering","None",mumps->id.ICNTL(28),&mumps->id.ICNTL(28),NULL);
1281: PetscOptionsInt("-mat_mumps_icntl_29","ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis","None",mumps->id.ICNTL(29),&mumps->id.ICNTL(29),NULL);
1282: PetscOptionsInt("-mat_mumps_icntl_30","ICNTL(30): compute user-specified set of entries in inv(A)","None",mumps->id.ICNTL(30),&mumps->id.ICNTL(30),NULL);
1283: PetscOptionsInt("-mat_mumps_icntl_31","ICNTL(31): indicates which factors may be discarded during factorization","None",mumps->id.ICNTL(31),&mumps->id.ICNTL(31),NULL);
1284: /* PetscOptionsInt("-mat_mumps_icntl_32","ICNTL(32): performs the forward elemination of the right-hand sides during factorization","None",mumps->id.ICNTL(32),&mumps->id.ICNTL(32),NULL); -- not supported by PETSc API */
1285: PetscOptionsInt("-mat_mumps_icntl_33","ICNTL(33): compute determinant","None",mumps->id.ICNTL(33),&mumps->id.ICNTL(33),NULL);
1287: PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",mumps->id.CNTL(1),&mumps->id.CNTL(1),NULL);
1288: PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",mumps->id.CNTL(2),&mumps->id.CNTL(2),NULL);
1289: PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",mumps->id.CNTL(3),&mumps->id.CNTL(3),NULL);
1290: PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",mumps->id.CNTL(4),&mumps->id.CNTL(4),NULL);
1291: PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",mumps->id.CNTL(5),&mumps->id.CNTL(5),NULL);
1293: PetscOptionsString("-mat_mumps_ooc_tmpdir", "out of core directory", "None", mumps->id.ooc_tmpdir, mumps->id.ooc_tmpdir, 256, NULL);
1295: PetscOptionsIntArray("-mat_mumps_view_info","request INFO local to each processor","",info,&ninfo,NULL);
1296: if (ninfo) {
1297:     if (ninfo > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"number of INFO %d must be <= 40\n",ninfo);
1298: PetscMalloc1(ninfo,&mumps->info);
1299: mumps->ninfo = ninfo;
1300: for (i=0; i<ninfo; i++) {
1301:       if (info[i] < 1 || info[i] > 40) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"index of INFO %d must be between 1 and 40\n",info[i]);
1302: else {
1303: mumps->info[i] = info[i];
1304: }
1305: }
1306: }
1308: PetscOptionsEnd();
1309: return(0);
1310: }
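/*
   Example (illustrative) of driving the options above from the command line
   through a KSP solver (the executable name is hypothetical):

       ./app -ksp_type preonly -pc_type lu \
             -pc_factor_mat_solver_package mumps \
             -mat_mumps_icntl_7 2 -mat_mumps_cntl_1 0.01

   which selects MUMPS as the factorization package, the AMF ordering
   (ICNTL(7)=2) and a relative pivoting threshold CNTL(1)=0.01.
*/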
1314: PetscErrorCode PetscInitializeMUMPS(Mat A,Mat_MUMPS *mumps)
1315: {
1319: MPI_Comm_rank(PetscObjectComm((PetscObject)A), &mumps->myid);
1320: MPI_Comm_size(PetscObjectComm((PetscObject)A),&mumps->size);
1321: MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(mumps->comm_mumps));
1323: mumps->id.comm_fortran = MPI_Comm_c2f(mumps->comm_mumps);
1325: mumps->id.job = JOB_INIT;
1326:   mumps->id.par = 1;  /* host participates in factorization and solve */
1327: mumps->id.sym = mumps->sym;
1328: PetscMUMPS_c(&mumps->id);
1330: mumps->scat_rhs = NULL;
1331: mumps->scat_sol = NULL;
1333: /* set PETSc-MUMPS default options - override MUMPS default */
1334: mumps->id.ICNTL(3) = 0;
1335: mumps->id.ICNTL(4) = 0;
1336: if (mumps->size == 1) {
1337: mumps->id.ICNTL(18) = 0; /* centralized assembled matrix input */
1338: } else {
1339: mumps->id.ICNTL(18) = 3; /* distributed assembled matrix input */
1340: mumps->id.ICNTL(20) = 0; /* rhs is in dense format */
1341: mumps->id.ICNTL(21) = 1; /* distributed solution */
1342: }
1344: /* schur */
1345: mumps->id.size_schur = 0;
1346: mumps->id.listvar_schur = NULL;
1347: mumps->id.schur = NULL;
1348: mumps->sizeredrhs = 0;
1349: mumps->schur_pivots = NULL;
1350: mumps->schur_work = NULL;
1351: mumps->schur_sol = NULL;
1352: mumps->schur_sizesol = 0;
1353: mumps->schur_factored = PETSC_FALSE;
1354: mumps->schur_inverted = PETSC_FALSE;
1355: return(0);
1356: }
1360: PetscErrorCode MatFactorSymbolic_MUMPS_ReportIfError(Mat F,Mat A,const MatFactorInfo *info,Mat_MUMPS *mumps)
1361: {
1365: if (mumps->id.INFOG(1) < 0) {
1366: if (A->erroriffailure) {
1367: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",mumps->id.INFOG(1));
1368: } else {
1369: if (mumps->id.INFOG(1) == -6) {
1370: PetscInfo2(F,"matrix is singular in structure, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1371: F->errortype = MAT_FACTOR_STRUCT_ZEROPIVOT;
1372: } else if (mumps->id.INFOG(1) == -5 || mumps->id.INFOG(1) == -7) {
1373: PetscInfo2(F,"problem of workspace, INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1374: F->errortype = MAT_FACTOR_OUTMEMORY;
1375: } else {
1376: PetscInfo2(F,"Error reported by MUMPS in analysis phase: INFOG(1)=%d, INFO(2)=%d\n",mumps->id.INFOG(1),mumps->id.INFO(2));
1377: F->errortype = MAT_FACTOR_OTHER;
1378: }
1379: }
1380: }
1381: return(0);
1382: }
1384: /* Note Petsc r(=c) permutation is used when mumps->id.ICNTL(7)==1 with centralized assembled matrix input; otherwise r and c are ignored */
1387: PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1388: {
1389: Mat_MUMPS *mumps = (Mat_MUMPS*)F->spptr;
1391: Vec b;
1392: IS is_iden;
1393: const PetscInt M = A->rmap->N;
1396: mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1398: /* Set MUMPS options from the options database */
1399: PetscSetMUMPSFromOptions(F,A);
1401: (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1403: /* analysis phase */
1404: /*----------------*/
1405: mumps->id.job = JOB_FACTSYMBOLIC;
1406: mumps->id.n = M;
1407: switch (mumps->id.ICNTL(18)) {
1408: case 0: /* centralized assembled matrix input */
1409: if (!mumps->myid) {
1410: mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1411: if (mumps->id.ICNTL(6)>1) {
1412: mumps->id.a = (MumpsScalar*)mumps->val;
1413: }
1414:       if (mumps->id.ICNTL(7) == 1) { /* use user-provided matrix ordering - assuming r = c ordering */
1415: /*
1416: PetscBool flag;
1417: ISEqual(r,c,&flag);
1418: if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"row_perm != col_perm");
1419: ISView(r,PETSC_VIEWER_STDOUT_SELF);
1420: */
1421: if (!mumps->myid) {
1422: const PetscInt *idx;
1423: PetscInt i,*perm_in;
1425: PetscMalloc1(M,&perm_in);
1426: ISGetIndices(r,&idx);
1428: mumps->id.perm_in = perm_in;
1429: for (i=0; i<M; i++) perm_in[i] = idx[i]+1; /* perm_in[]: start from 1, not 0! */
1430: ISRestoreIndices(r,&idx);
1431: }
1432: }
1433: }
1434: break;
1435: case 3: /* distributed assembled matrix input (size>1) */
1436: mumps->id.nz_loc = mumps->nz;
1437: mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1438: if (mumps->id.ICNTL(6)>1) {
1439: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1440: }
1441: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1442: if (!mumps->myid) {
1443: VecCreateSeq(PETSC_COMM_SELF,A->rmap->N,&mumps->b_seq);
1444: ISCreateStride(PETSC_COMM_SELF,A->rmap->N,0,1,&is_iden);
1445: } else {
1446: VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);
1447: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
1448: }
1449: MatCreateVecs(A,NULL,&b);
1450: VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);
1451: ISDestroy(&is_iden);
1452: VecDestroy(&b);
1453: break;
1454: }
1455: PetscMUMPS_c(&mumps->id);
1456: MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);
1458: F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1459: F->ops->solve = MatSolve_MUMPS;
1460: F->ops->solvetranspose = MatSolveTranspose_MUMPS;
1461: F->ops->matsolve = MatMatSolve_MUMPS;
1462: return(0);
1463: }
1465: /* Note the Petsc r and c permutations are ignored */
1468: PetscErrorCode MatLUFactorSymbolic_BAIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
1469: {
1470: Mat_MUMPS *mumps = (Mat_MUMPS*)F->spptr;
1472: Vec b;
1473: IS is_iden;
1474: const PetscInt M = A->rmap->N;
1477: mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1479: /* Set MUMPS options from the options database */
1480: PetscSetMUMPSFromOptions(F,A);
1482: (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1484: /* analysis phase */
1485: /*----------------*/
1486: mumps->id.job = JOB_FACTSYMBOLIC;
1487: mumps->id.n = M;
1488: switch (mumps->id.ICNTL(18)) {
1489: case 0: /* centralized assembled matrix input */
1490: if (!mumps->myid) {
1491: mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1492: if (mumps->id.ICNTL(6)>1) {
1493: mumps->id.a = (MumpsScalar*)mumps->val;
1494: }
1495: }
1496: break;
1497: case 3: /* distributed assembled matrix input (size>1) */
1498: mumps->id.nz_loc = mumps->nz;
1499: mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1500: if (mumps->id.ICNTL(6)>1) {
1501: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1502: }
1503: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1504: if (!mumps->myid) {
1505: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);
1506: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
1507: } else {
1508: VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);
1509: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
1510: }
1511: MatCreateVecs(A,NULL,&b);
1512: VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);
1513: ISDestroy(&is_iden);
1514: VecDestroy(&b);
1515: break;
1516: }
1517: PetscMUMPS_c(&mumps->id);
1518: MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);
1520: F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
1521: F->ops->solve = MatSolve_MUMPS;
1522: F->ops->solvetranspose = MatSolveTranspose_MUMPS;
1523: return(0);
1524: }
1526: /* Note the Petsc r permutation and factor info are ignored */
1529: PetscErrorCode MatCholeskyFactorSymbolic_MUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
1530: {
1531: Mat_MUMPS *mumps = (Mat_MUMPS*)F->spptr;
1533: Vec b;
1534: IS is_iden;
1535: const PetscInt M = A->rmap->N;
1538: mumps->matstruc = DIFFERENT_NONZERO_PATTERN;
1540: /* Set MUMPS options from the options database */
1541: PetscSetMUMPSFromOptions(F,A);
1543: (*mumps->ConvertToTriples)(A, 1, MAT_INITIAL_MATRIX, &mumps->nz, &mumps->irn, &mumps->jcn, &mumps->val);
1545: /* analysis phase */
1546: /*----------------*/
1547: mumps->id.job = JOB_FACTSYMBOLIC;
1548: mumps->id.n = M;
1549: switch (mumps->id.ICNTL(18)) {
1550: case 0: /* centralized assembled matrix input */
1551: if (!mumps->myid) {
1552: mumps->id.nz =mumps->nz; mumps->id.irn=mumps->irn; mumps->id.jcn=mumps->jcn;
1553: if (mumps->id.ICNTL(6)>1) {
1554: mumps->id.a = (MumpsScalar*)mumps->val;
1555: }
1556: }
1557: break;
1558: case 3: /* distributed assembled matrix input (size>1) */
1559: mumps->id.nz_loc = mumps->nz;
1560: mumps->id.irn_loc=mumps->irn; mumps->id.jcn_loc=mumps->jcn;
1561: if (mumps->id.ICNTL(6)>1) {
1562: mumps->id.a_loc = (MumpsScalar*)mumps->val;
1563: }
1564: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
1565: if (!mumps->myid) {
1566: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&mumps->b_seq);
1567: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
1568: } else {
1569: VecCreateSeq(PETSC_COMM_SELF,0,&mumps->b_seq);
1570: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
1571: }
1572: MatCreateVecs(A,NULL,&b);
1573: VecScatterCreate(b,is_iden,mumps->b_seq,is_iden,&mumps->scat_rhs);
1574: ISDestroy(&is_iden);
1575: VecDestroy(&b);
1576: break;
1577: }
1578: PetscMUMPS_c(&mumps->id);
1579: MatFactorSymbolic_MUMPS_ReportIfError(F,A,info,mumps);
1582: F->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
1583: F->ops->solve = MatSolve_MUMPS;
1584: F->ops->solvetranspose = MatSolve_MUMPS;
1585: F->ops->matsolve = MatMatSolve_MUMPS;
1586: #if defined(PETSC_USE_COMPLEX)
1587: F->ops->getinertia = NULL;
1588: #else
1589: F->ops->getinertia = MatGetInertia_SBAIJMUMPS;
1590: #endif
1591: return(0);
1592: }
1596: PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
1597: {
1598: PetscErrorCode ierr;
1599: PetscBool iascii;
1600: PetscViewerFormat format;
1601: Mat_MUMPS *mumps=(Mat_MUMPS*)A->spptr;
1604: /* check if matrix is mumps type */
1605: if (A->ops->solve != MatSolve_MUMPS) return(0);
1607: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1608: if (iascii) {
1609: PetscViewerGetFormat(viewer,&format);
1610: if (format == PETSC_VIEWER_ASCII_INFO) {
1611: PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");
1612: PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",mumps->id.sym);
1613: PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",mumps->id.par);
1614: PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",mumps->id.ICNTL(1));
1615: PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg): %d \n",mumps->id.ICNTL(2));
1616: PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",mumps->id.ICNTL(3));
1617: PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",mumps->id.ICNTL(4));
1618: PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",mumps->id.ICNTL(5));
1619: PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",mumps->id.ICNTL(6));
1620: PetscViewerASCIIPrintf(viewer," ICNTL(7) (sequential matrix ordering):%d \n",mumps->id.ICNTL(7));
1621: PetscViewerASCIIPrintf(viewer," ICNTL(8) (scaling strategy): %d \n",mumps->id.ICNTL(8));
1622: PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",mumps->id.ICNTL(10));
1623: PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",mumps->id.ICNTL(11));
1624: if (mumps->id.ICNTL(11)>0) {
1625: PetscViewerASCIIPrintf(viewer," RINFOG(4) (inf norm of input mat): %g\n",mumps->id.RINFOG(4));
1626: PetscViewerASCIIPrintf(viewer," RINFOG(5) (inf norm of solution): %g\n",mumps->id.RINFOG(5));
1627: PetscViewerASCIIPrintf(viewer," RINFOG(6) (inf norm of residual): %g\n",mumps->id.RINFOG(6));
1628: PetscViewerASCIIPrintf(viewer," RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",mumps->id.RINFOG(7),mumps->id.RINFOG(8));
1629: PetscViewerASCIIPrintf(viewer," RINFOG(9) (error estimate): %g \n",mumps->id.RINFOG(9));
1630: PetscViewerASCIIPrintf(viewer," RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",mumps->id.RINFOG(10),mumps->id.RINFOG(11));
1631: }
1632: PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",mumps->id.ICNTL(12));
1633: PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",mumps->id.ICNTL(13));
1634: PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",mumps->id.ICNTL(14));
1635: /* ICNTL(15-17) not used */
1636: PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",mumps->id.ICNTL(18));
1637: PetscViewerASCIIPrintf(viewer," ICNTL(19) (Schur complement info): %d \n",mumps->id.ICNTL(19));
1638: PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",mumps->id.ICNTL(20));
1639: PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",mumps->id.ICNTL(21));
1640: PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",mumps->id.ICNTL(22));
1641: PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory that can be allocated locally):%d \n",mumps->id.ICNTL(23));
1643: PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d \n",mumps->id.ICNTL(24));
1644: PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",mumps->id.ICNTL(25));
1645: PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",mumps->id.ICNTL(26));
1646: PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",mumps->id.ICNTL(27));
1647: PetscViewerASCIIPrintf(viewer," ICNTL(28) (use parallel or sequential ordering): %d \n",mumps->id.ICNTL(28));
1648: PetscViewerASCIIPrintf(viewer," ICNTL(29) (parallel ordering): %d \n",mumps->id.ICNTL(29));
1650: PetscViewerASCIIPrintf(viewer," ICNTL(30) (user-specified set of entries in inv(A)): %d \n",mumps->id.ICNTL(30));
1651: PetscViewerASCIIPrintf(viewer," ICNTL(31) (factors can be discarded in the solve phase): %d \n",mumps->id.ICNTL(31));
1652: PetscViewerASCIIPrintf(viewer," ICNTL(33) (compute determinant): %d \n",mumps->id.ICNTL(33));
1654: PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",mumps->id.CNTL(1));
1655: PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",mumps->id.CNTL(2));
1656: PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",mumps->id.CNTL(3));
1657: PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",mumps->id.CNTL(4));
1658: PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g \n",mumps->id.CNTL(5));
1660: /* information local to each processor */
1661: PetscViewerASCIIPrintf(viewer, " RINFO(1) (local estimated flops for the elimination after analysis): \n");
1662: PetscViewerASCIIPushSynchronized(viewer);
1663: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(1));
1664: PetscViewerFlush(viewer);
1665: PetscViewerASCIIPrintf(viewer, " RINFO(2) (local estimated flops for the assembly after factorization): \n");
1666: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(2));
1667: PetscViewerFlush(viewer);
1668: PetscViewerASCIIPrintf(viewer, " RINFO(3) (local estimated flops for the elimination after factorization): \n");
1669: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %g \n",mumps->myid,mumps->id.RINFO(3));
1670: PetscViewerFlush(viewer);
1672: PetscViewerASCIIPrintf(viewer, " INFO(15) (estimated size (in MB) of MUMPS internal data for running numerical factorization): \n");
1673: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(15));
1674: PetscViewerFlush(viewer);
1676: PetscViewerASCIIPrintf(viewer, " INFO(16) (size (in MB) of MUMPS internal data used during numerical factorization): \n");
1677: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(16));
1678: PetscViewerFlush(viewer);
1680: PetscViewerASCIIPrintf(viewer, " INFO(23) (num of pivots eliminated on this processor after factorization): \n");
1681: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(23));
1682: PetscViewerFlush(viewer);
1684: if (mumps->ninfo && mumps->ninfo <= 40){
1685: PetscInt i;
1686: for (i=0; i<mumps->ninfo; i++){
1687: PetscViewerASCIIPrintf(viewer, " INFO(%d): \n",mumps->info[i]);
1688: PetscViewerASCIISynchronizedPrintf(viewer," [%d] %d \n",mumps->myid,mumps->id.INFO(mumps->info[i]));
1689: PetscViewerFlush(viewer);
1690: }
1691: }
1694: PetscViewerASCIIPopSynchronized(viewer);
1696: if (!mumps->myid) { /* information from the host */
1697: PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",mumps->id.RINFOG(1));
1698: PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",mumps->id.RINFOG(2));
1699: PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",mumps->id.RINFOG(3));
1700: PetscViewerASCIIPrintf(viewer," (RINFOG(12) RINFOG(13))*2^INFOG(34) (determinant): (%g,%g)*(2^%d)\n",mumps->id.RINFOG(12),mumps->id.RINFOG(13),mumps->id.INFOG(34));
1702: PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(3));
1703: PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",mumps->id.INFOG(4));
1704: PetscViewerASCIIPrintf(viewer," INFOG(5) (estimated maximum front size in the complete tree): %d \n",mumps->id.INFOG(5));
1705: PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",mumps->id.INFOG(6));
1706: PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively used after analysis): %d \n",mumps->id.INFOG(7));
1707: PetscViewerASCIIPrintf(viewer," INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",mumps->id.INFOG(8));
1708: PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",mumps->id.INFOG(9));
1709: PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space to store the matrix factors after factorization): %d \n",mumps->id.INFOG(10));
1710: PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",mumps->id.INFOG(11));
1711: PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",mumps->id.INFOG(12));
1712: PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after factorization): %d \n",mumps->id.INFOG(13));
1713: PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compresses after factorization): %d \n",mumps->id.INFOG(14));
1714: PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",mumps->id.INFOG(15));
1715: PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",mumps->id.INFOG(16));
1716: PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",mumps->id.INFOG(17));
1717: PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",mumps->id.INFOG(18));
1718: PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",mumps->id.INFOG(19));
1719: PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",mumps->id.INFOG(20));
1720: PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",mumps->id.INFOG(21));
1721: PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",mumps->id.INFOG(22));
1722: PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",mumps->id.INFOG(23));
1723: PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",mumps->id.INFOG(24));
1724: PetscViewerASCIIPrintf(viewer," INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",mumps->id.INFOG(25));
1725: PetscViewerASCIIPrintf(viewer," INFOG(28) (after factorization: number of null pivots encountered): %d\n",mumps->id.INFOG(28));
1726: PetscViewerASCIIPrintf(viewer," INFOG(29) (after factorization: effective number of entries in the factors (sum over all processors)): %d\n",mumps->id.INFOG(29));
1727: PetscViewerASCIIPrintf(viewer," INFOG(30, 31) (after solution: size in Mbytes of memory used during solution phase): %d, %d\n",mumps->id.INFOG(30),mumps->id.INFOG(31));
1728: PetscViewerASCIIPrintf(viewer," INFOG(32) (after analysis: type of analysis done): %d\n",mumps->id.INFOG(32));
1729: PetscViewerASCIIPrintf(viewer," INFOG(33) (value used for ICNTL(8)): %d\n",mumps->id.INFOG(33));
1730: PetscViewerASCIIPrintf(viewer," INFOG(34) (exponent of the determinant if determinant is requested): %d\n",mumps->id.INFOG(34));
1731: }
1732: }
1733: }
1734: return(0);
1735: }
1739: PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
1740: {
1741: Mat_MUMPS *mumps =(Mat_MUMPS*)A->spptr;
1744: info->block_size = 1.0;
1745: info->nz_allocated = mumps->id.INFOG(20);
1746: info->nz_used = mumps->id.INFOG(20);
1747: info->nz_unneeded = 0.0;
1748: info->assemblies = 0.0;
1749: info->mallocs = 0.0;
1750: info->memory = 0.0;
1751: info->fill_ratio_given = 0;
1752: info->fill_ratio_needed = 0;
1753: info->factor_mallocs = 0;
1754: return(0);
1755: }
1757: /* -------------------------------------------------------------------------------------------*/
1760: PetscErrorCode MatFactorSetSchurIS_MUMPS(Mat F, IS is)
1761: {
1762: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1763: const PetscInt *idxs;
1764: PetscInt size,i;
1768: if (mumps->size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MUMPS parallel Schur complements not yet supported by PETSc\n");
1769: ISGetLocalSize(is,&size);
1770: if (mumps->id.size_schur != size) {
1771: PetscFree2(mumps->id.listvar_schur,mumps->id.schur);
1772: mumps->id.size_schur = size;
1773: mumps->id.schur_lld = size;
1774: PetscMalloc2(size,&mumps->id.listvar_schur,size*size,&mumps->id.schur);
1775: }
1776: ISGetIndices(is,&idxs);
1777: PetscMemcpy(mumps->id.listvar_schur,idxs,size*sizeof(PetscInt));
1778: /* MUMPS expects Fortran style indices */
1779: for (i=0;i<size;i++) mumps->id.listvar_schur[i]++;
1780: ISRestoreIndices(is,&idxs);
1781: if (size) { /* turn on the Schur switch if the set of indices is not empty */
1782: if (F->factortype == MAT_FACTOR_LU) {
1783: mumps->id.ICNTL(19) = 3; /* MUMPS returns full matrix */
1784: } else {
1785: mumps->id.ICNTL(19) = 2; /* MUMPS returns lower triangular part */
1786: }
1787: /* set a special value of ICNTL (not handled by MUMPS) to be used in the solve phase by PETSc */
1788: mumps->id.ICNTL(26) = -1;
1789: }
1790: return(0);
1791: }
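/* A typical Schur complement workflow with this interface -- a sketch only, assuming a
   sequential LU factor F obtained from MatGetFactor(), an index set is listing the Schur
   variables, row/column ISs r,c, and a MatFactorInfo info; the calls and their order are
   illustrative:

      MatFactorSetSchurIS(F,is);                 select the Schur variables (before the symbolic phase)
      MatLUFactorSymbolic(F,A,r,c,&info);        analysis (the r,c permutations are ignored by MUMPS)
      MatLUFactorNumeric(F,A,&info);             factorization; MUMPS assembles the Schur matrix
      MatFactorCreateSchurComplement(F,&S);      copy the Schur complement into a dense Mat S
*/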
1793: /* -------------------------------------------------------------------------------------------*/
1796: PetscErrorCode MatFactorCreateSchurComplement_MUMPS(Mat F,Mat* S)
1797: {
1798: Mat St;
1799: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1800: PetscScalar *array;
1801: #if defined(PETSC_USE_COMPLEX)
1802: PetscScalar im = PetscSqrtScalar((PetscScalar)-1.0);
1803: #endif
1807: if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
1808: else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS first");
1810: MatCreate(PetscObjectComm((PetscObject)F),&St);
1811: MatSetSizes(St,PETSC_DECIDE,PETSC_DECIDE,mumps->id.size_schur,mumps->id.size_schur);
1812: MatSetType(St,MATDENSE);
1813: MatSetUp(St);
1814: MatDenseGetArray(St,&array);
1815: if (!mumps->sym) { /* MUMPS always returns a full matrix */
1816: if (mumps->id.ICNTL(19) == 1) { /* stored by rows */
1817: PetscInt i,j,N=mumps->id.size_schur;
1818: for (i=0;i<N;i++) {
1819: for (j=0;j<N;j++) {
1820: #if !defined(PETSC_USE_COMPLEX)
1821: PetscScalar val = mumps->id.schur[i*N+j];
1822: #else
1823: PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1824: #endif
1825: array[j*N+i] = val;
1826: }
1827: }
1828: } else { /* stored by columns */
1829: PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));
1830: }
1831: } else { /* either full or lower-triangular (not packed) */
1832: if (mumps->id.ICNTL(19) == 2) { /* lower triangular stored by columns */
1833: PetscInt i,j,N=mumps->id.size_schur;
1834: for (i=0;i<N;i++) {
1835: for (j=i;j<N;j++) {
1836: #if !defined(PETSC_USE_COMPLEX)
1837: PetscScalar val = mumps->id.schur[i*N+j];
1838: #else
1839: PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1840: #endif
1841: array[i*N+j] = val;
1842: array[j*N+i] = val;
1843: }
1844: }
1845: } else if (mumps->id.ICNTL(19) == 3) { /* full matrix */
1846: PetscMemcpy(array,mumps->id.schur,mumps->id.size_schur*mumps->id.size_schur*sizeof(PetscScalar));
1847: } else { /* ICNTL(19) == 1 lower triangular stored by rows */
1848: PetscInt i,j,N=mumps->id.size_schur;
1849: for (i=0;i<N;i++) {
1850: for (j=0;j<i+1;j++) {
1851: #if !defined(PETSC_USE_COMPLEX)
1852: PetscScalar val = mumps->id.schur[i*N+j];
1853: #else
1854: PetscScalar val = mumps->id.schur[i*N+j].r + im*mumps->id.schur[i*N+j].i;
1855: #endif
1856: array[i*N+j] = val;
1857: array[j*N+i] = val;
1858: }
1859: }
1860: }
1861: }
1862: MatDenseRestoreArray(St,&array);
1863: *S = St;
1864: return(0);
1865: }
1867: /* -------------------------------------------------------------------------------------------*/
1870: PetscErrorCode MatFactorGetSchurComplement_MUMPS(Mat F,Mat* S)
1871: {
1872: Mat St;
1873: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1877: if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
1878: else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS first");
1880: /* It should be the responsibility of the user to handle different ICNTL(19) cases and factorization stages if they want to work with the raw data */
1881: MatCreateSeqDense(PetscObjectComm((PetscObject)F),mumps->id.size_schur,mumps->id.size_schur,(PetscScalar*)mumps->id.schur,&St);
1882: *S = St;
1883: return(0);
1884: }
1886: /* -------------------------------------------------------------------------------------------*/
1889: PetscErrorCode MatFactorInvertSchurComplement_MUMPS(Mat F)
1890: {
1891: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1895: if (!mumps->id.ICNTL(19)) { /* do nothing */
1896: return(0);
1897: }
1898: if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS first");
1899: MatMumpsInvertSchur_Private(mumps);
1900: return(0);
1901: }
1903: /* -------------------------------------------------------------------------------------------*/
1906: PetscErrorCode MatFactorSolveSchurComplement_MUMPS(Mat F, Vec rhs, Vec sol)
1907: {
1908: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1909: MumpsScalar *orhs;
1910: PetscScalar *osol,*nrhs,*nsol;
1911: PetscInt orhs_size,osol_size,olrhs_size;
1915: if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
1916: if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS first");
1918: /* swap pointers */
1919: orhs = mumps->id.redrhs;
1920: olrhs_size = mumps->id.lredrhs;
1921: orhs_size = mumps->sizeredrhs;
1922: osol = mumps->schur_sol;
1923: osol_size = mumps->schur_sizesol;
1924: VecGetArray(rhs,&nrhs);
1925: VecGetArray(sol,&nsol);
1926: mumps->id.redrhs = (MumpsScalar*)nrhs;
1927: VecGetLocalSize(rhs,&mumps->sizeredrhs);
1928: mumps->id.lredrhs = mumps->sizeredrhs;
1929: mumps->schur_sol = nsol;
1930: VecGetLocalSize(sol,&mumps->schur_sizesol);
1932: /* solve Schur complement */
1933: mumps->id.nrhs = 1;
1934: MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);
1935: /* restore pointers */
1936: VecRestoreArray(rhs,&nrhs);
1937: VecRestoreArray(sol,&nsol);
1938: mumps->id.redrhs = orhs;
1939: mumps->id.lredrhs = olrhs_size;
1940: mumps->sizeredrhs = orhs_size;
1941: mumps->schur_sol = osol;
1942: mumps->schur_sizesol = osol_size;
1943: return(0);
1944: }
1946: /* -------------------------------------------------------------------------------------------*/
1949: PetscErrorCode MatFactorSolveSchurComplementTranspose_MUMPS(Mat F, Vec rhs, Vec sol)
1950: {
1951: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1952: MumpsScalar *orhs;
1953: PetscScalar *osol,*nrhs,*nsol;
1954: PetscInt orhs_size,osol_size;
1958: if (!mumps->id.ICNTL(19)) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur complement mode not selected! You should call MatFactorSetSchurIS to enable it");
1959: else if (!mumps->id.size_schur) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ORDER,"Schur indices not set! You should call MatFactorSetSchurIS first");
1961: /* swap pointers */
1962: orhs = mumps->id.redrhs;
1963: orhs_size = mumps->sizeredrhs;
1964: osol = mumps->schur_sol;
1965: osol_size = mumps->schur_sizesol;
1966: VecGetArray(rhs,&nrhs);
1967: VecGetArray(sol,&nsol);
1968: mumps->id.redrhs = (MumpsScalar*)nrhs;
1969: VecGetLocalSize(rhs,&mumps->sizeredrhs);
1970: mumps->schur_sol = nsol;
1971: VecGetLocalSize(sol,&mumps->schur_sizesol);
1973: /* solve Schur complement */
1974: mumps->id.nrhs = 1;
1975: mumps->id.ICNTL(9) = 0;
1976: MatMumpsSolveSchur_Private(mumps,PETSC_FALSE);
1977: mumps->id.ICNTL(9) = 1;
1978: /* restore pointers */
1979: VecRestoreArray(rhs,&nrhs);
1980: VecRestoreArray(sol,&nsol);
1981: mumps->id.redrhs = orhs;
1982: mumps->sizeredrhs = orhs_size;
1983: mumps->schur_sol = osol;
1984: mumps->schur_sizesol = osol_size;
1985: return(0);
1986: }
1988: /* -------------------------------------------------------------------------------------------*/
1991: PetscErrorCode MatMumpsSetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt ival)
1992: {
1993: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
1996: mumps->id.ICNTL(icntl) = ival;
1997: return(0);
1998: }
2002: PetscErrorCode MatMumpsGetIcntl_MUMPS(Mat F,PetscInt icntl,PetscInt *ival)
2003: {
2004: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2007: *ival = mumps->id.ICNTL(icntl);
2008: return(0);
2009: }
2013: /*@
2014: MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
2016: Logically Collective on Mat
2018: Input Parameters:
2019: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2020: . icntl - index of MUMPS parameter array ICNTL()
2021: - ival - value of MUMPS ICNTL(icntl)
2023: Options Database:
2024: . -mat_mumps_icntl_<icntl> <ival>
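   Example: a minimal sketch, assuming F was obtained from MatGetFactor() with MATSOLVERMUMPS
   and ival is a PetscInt; index 14 is chosen purely for illustration:
$      MatMumpsSetIcntl(F,14,30);     /* allow a 30% increase of the estimated working space */
$      MatMumpsGetIcntl(F,14,&ival);  /* read the value back */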
2026: Level: beginner
2028: References:
2029: . MUMPS Users' Guide
2031: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2032: @*/
2033: PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt icntl,PetscInt ival)
2034: {
2036:
2039: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2042: PetscTryMethod(F,"MatMumpsSetIcntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));
2043: return(0);
2044: }
2048: /*@
2049: MatMumpsGetIcntl - Get MUMPS parameter ICNTL()
2051: Logically Collective on Mat
2053: Input Parameters:
2054: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2055: - icntl - index of MUMPS parameter array ICNTL()
2057: Output Parameter:
2058: . ival - value of MUMPS ICNTL(icntl)
2060: Level: beginner
2062: References:
2063: . MUMPS Users' Guide
2065: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2066: @*/
2067: PetscErrorCode MatMumpsGetIcntl(Mat F,PetscInt icntl,PetscInt *ival)
2068: {
2073: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2076: PetscUseMethod(F,"MatMumpsGetIcntl_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));
2077: return(0);
2078: }
2080: /* -------------------------------------------------------------------------------------------*/
2083: PetscErrorCode MatMumpsSetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal val)
2084: {
2085: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2088: mumps->id.CNTL(icntl) = val;
2089: return(0);
2090: }
2094: PetscErrorCode MatMumpsGetCntl_MUMPS(Mat F,PetscInt icntl,PetscReal *val)
2095: {
2096: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2099: *val = mumps->id.CNTL(icntl);
2100: return(0);
2101: }
2105: /*@
2106: MatMumpsSetCntl - Set MUMPS parameter CNTL()
2108: Logically Collective on Mat
2110: Input Parameters:
2111: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2112: . icntl - index of MUMPS parameter array CNTL()
2113: - val - value of MUMPS CNTL(icntl)
2115: Options Database:
2116: . -mat_mumps_cntl_<icntl> <val>
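   Example: a sketch only; CNTL(1) is the relative pivoting threshold and 0.01 is an
   arbitrary illustrative value:
$      MatMumpsSetCntl(F,1,0.01);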
2118: Level: beginner
2120: References:
2121: . MUMPS Users' Guide
2123: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2124: @*/
2125: PetscErrorCode MatMumpsSetCntl(Mat F,PetscInt icntl,PetscReal val)
2126: {
2131: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2134: PetscTryMethod(F,"MatMumpsSetCntl_C",(Mat,PetscInt,PetscReal),(F,icntl,val));
2135: return(0);
2136: }
2140: /*@
2141: MatMumpsGetCntl - Get MUMPS parameter CNTL()
2143: Logically Collective on Mat
2145: Input Parameters:
2146: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2147: - icntl - index of MUMPS parameter array CNTL()
2149: Output Parameter:
2150: . val - value of MUMPS CNTL(icntl)
2152: Level: beginner
2154: References:
2155: . MUMPS Users' Guide
2157: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2158: @*/
2159: PetscErrorCode MatMumpsGetCntl(Mat F,PetscInt icntl,PetscReal *val)
2160: {
2165: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2168: PetscUseMethod(F,"MatMumpsGetCntl_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));
2169: return(0);
2170: }
2174: PetscErrorCode MatMumpsGetInfo_MUMPS(Mat F,PetscInt icntl,PetscInt *info)
2175: {
2176: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2179: *info = mumps->id.INFO(icntl);
2180: return(0);
2181: }
2185: PetscErrorCode MatMumpsGetInfog_MUMPS(Mat F,PetscInt icntl,PetscInt *infog)
2186: {
2187: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2190: *infog = mumps->id.INFOG(icntl);
2191: return(0);
2192: }
2196: PetscErrorCode MatMumpsGetRinfo_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfo)
2197: {
2198: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2201: *rinfo = mumps->id.RINFO(icntl);
2202: return(0);
2203: }
2207: PetscErrorCode MatMumpsGetRinfog_MUMPS(Mat F,PetscInt icntl,PetscReal *rinfog)
2208: {
2209: Mat_MUMPS *mumps =(Mat_MUMPS*)F->spptr;
2212: *rinfog = mumps->id.RINFOG(icntl);
2213: return(0);
2214: }
2218: /*@
2219: MatMumpsGetInfo - Get MUMPS parameter INFO()
2221: Logically Collective on Mat
2223: Input Parameters:
2224: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2225: - icntl - index of MUMPS parameter array INFO()
2227: Output Parameter:
2228: . ival - value of MUMPS INFO(icntl)
2230: Level: beginner
2232: References:
2233: . MUMPS Users' Guide
2235: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2236: @*/
2237: PetscErrorCode MatMumpsGetInfo(Mat F,PetscInt icntl,PetscInt *ival)
2238: {
2243: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2245: PetscUseMethod(F,"MatMumpsGetInfo_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));
2246: return(0);
2247: }
2251: /*@
2252: MatMumpsGetInfog - Get MUMPS parameter INFOG()
2254: Logically Collective on Mat
2256: Input Parameters:
2257: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2258: - icntl - index of MUMPS parameter array INFOG()
2260: Output Parameter:
2261: . ival - value of MUMPS INFOG(icntl)
2263: Level: beginner
2265: References:
2266: . MUMPS Users' Guide
2268: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2269: @*/
2270: PetscErrorCode MatMumpsGetInfog(Mat F,PetscInt icntl,PetscInt *ival)
2271: {
2276: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2278: PetscUseMethod(F,"MatMumpsGetInfog_C",(Mat,PetscInt,PetscInt*),(F,icntl,ival));
2279: return(0);
2280: }
2284: /*@
2285: MatMumpsGetRinfo - Get MUMPS parameter RINFO()
2287: Logically Collective on Mat
2289: Input Parameters:
2290: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2291: - icntl - index of MUMPS parameter array RINFO()
2293: Output Parameter:
2294: . val - value of MUMPS RINFO(icntl)
2296: Level: beginner
2298: References:
2299: . MUMPS Users' Guide
2301: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2302: @*/
2303: PetscErrorCode MatMumpsGetRinfo(Mat F,PetscInt icntl,PetscReal *val)
2304: {
2309: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2311: PetscUseMethod(F,"MatMumpsGetRinfo_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));
2312: return(0);
2313: }
2317: /*@
2318: MatMumpsGetRinfog - Get MUMPS parameter RINFOG()
2320: Logically Collective on Mat
2322: Input Parameters:
2323: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
2324: - icntl - index of MUMPS parameter array RINFOG()
2326: Output Parameter:
2327: . val - value of MUMPS RINFOG(icntl)
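   Example: a sketch of retrieving the determinant after factoring with ICNTL(33)=1
   (see -mat_mumps_icntl_33); d1,d2 are PetscReal, e is PetscInt, and the determinant
   is (d1 + i*d2)*2^e:
$      MatMumpsGetRinfog(F,12,&d1);   /* mantissa, real part */
$      MatMumpsGetRinfog(F,13,&d2);   /* mantissa, imaginary part */
$      MatMumpsGetInfog(F,34,&e);     /* exponent */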
2329: Level: beginner
2331: References:
2332: . MUMPS Users' Guide
2334: .seealso: MatGetFactor(), MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog()
2335: @*/
2336: PetscErrorCode MatMumpsGetRinfog(Mat F,PetscInt icntl,PetscReal *val)
2337: {
2342: if (!F->factortype) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
2344: PetscUseMethod(F,"MatMumpsGetRinfog_C",(Mat,PetscInt,PetscReal*),(F,icntl,val));
2345: return(0);
2346: }
2348: /*MC
2349: MATSOLVERMUMPS - A matrix type providing direct solvers (LU and Cholesky) for
2350: distributed and sequential matrices via the external package MUMPS.
2352: Works with MATAIJ, MATBAIJ (LU only), and MATSBAIJ matrices
2354: Use ./configure --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch to have PETSc installed with MUMPS
2356: Use -pc_type cholesky or lu -pc_factor_mat_solver_package mumps to use this direct solver
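   For example, an illustrative command line (ex2 stands for any PETSc program that solves a linear system with KSP):
$      ./ex2 -pc_type lu -pc_factor_mat_solver_package mumps -mat_mumps_icntl_4 1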
2358: Options Database Keys:
2359: + -mat_mumps_icntl_1 - ICNTL(1): output stream for error messages
2360: . -mat_mumps_icntl_2 - ICNTL(2): output stream for diagnostic printing, statistics, and warning
2361: . -mat_mumps_icntl_3 - ICNTL(3): output stream for global information, collected on the host
2362: . -mat_mumps_icntl_4 - ICNTL(4): level of printing (0 to 4)
2363: .  -mat_mumps_icntl_6 - ICNTL(6): permutes to a zero-free diagonal and/or scales the matrix (0 to 7)
2364: . -mat_mumps_icntl_7 - ICNTL(7): computes a symmetric permutation in sequential analysis (0 to 7). 3=Scotch, 4=PORD, 5=Metis
2365: . -mat_mumps_icntl_8 - ICNTL(8): scaling strategy (-2 to 8 or 77)
2366: . -mat_mumps_icntl_10 - ICNTL(10): max num of refinements
2367: . -mat_mumps_icntl_11 - ICNTL(11): statistics related to an error analysis (via -ksp_view)
2368: . -mat_mumps_icntl_12 - ICNTL(12): an ordering strategy for symmetric matrices (0 to 3)
2369: . -mat_mumps_icntl_13 - ICNTL(13): parallelism of the root node (enable ScaLAPACK) and its splitting
2370: . -mat_mumps_icntl_14 - ICNTL(14): percentage increase in the estimated working space
2371: . -mat_mumps_icntl_19 - ICNTL(19): computes the Schur complement
2372: . -mat_mumps_icntl_22 - ICNTL(22): in-core/out-of-core factorization and solve (0 or 1)
2373: .  -mat_mumps_icntl_23 - ICNTL(23): max size of the working memory (MB) that can be allocated per processor
2374: . -mat_mumps_icntl_24 - ICNTL(24): detection of null pivot rows (0 or 1)
2375: . -mat_mumps_icntl_25 - ICNTL(25): compute a solution of a deficient matrix and a null space basis
2376: .  -mat_mumps_icntl_26 - ICNTL(26): drives the solution phase if a Schur complement matrix is used
2377: .  -mat_mumps_icntl_28 - ICNTL(28): use 1 for sequential analysis and ICNTL(7) ordering, or 2 for parallel analysis and ICNTL(29) ordering
2378: . -mat_mumps_icntl_29 - ICNTL(29): parallel ordering 1 = ptscotch, 2 = parmetis
2379: . -mat_mumps_icntl_30 - ICNTL(30): compute user-specified set of entries in inv(A)
2380: . -mat_mumps_icntl_31 - ICNTL(31): indicates which factors may be discarded during factorization
2381: . -mat_mumps_icntl_33 - ICNTL(33): compute determinant
2382: . -mat_mumps_cntl_1 - CNTL(1): relative pivoting threshold
2383: . -mat_mumps_cntl_2 - CNTL(2): stopping criterion of refinement
2384: . -mat_mumps_cntl_3 - CNTL(3): absolute pivoting threshold
2385: . -mat_mumps_cntl_4 - CNTL(4): value for static pivoting
2386: - -mat_mumps_cntl_5 - CNTL(5): fixation for null pivots
2388: Level: beginner
2390: Notes: When a MUMPS factorization fails inside a KSP solve, for example with KSP_DIVERGED_PCSETUP_FAILED, one can find the MUMPS information about the failure by calling
2391: $ KSPGetPC(ksp,&pc);
2392: $ PCFactorGetMatrix(pc,&mat);
2393: $ MatMumpsGetInfo(mat,....);
2394: $ MatMumpsGetInfog(mat,....); etc.
2395:    Or run with -ksp_error_if_not_converged, in which case the program stops and the information is printed in the error message.
2397: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage, MatMumpsSetIcntl(), MatMumpsGetIcntl(), MatMumpsSetCntl(), MatMumpsGetCntl(), MatMumpsGetInfo(), MatMumpsGetInfog(), MatMumpsGetRinfo(), MatMumpsGetRinfog(), KSPGetPC(), PCGetFactor(), PCFactorGetMatrix()
2399: M*/
2403: static PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
2404: {
2406: *type = MATSOLVERMUMPS;
2407: return(0);
2408: }
2410: /* MatGetFactor for Seq and MPI AIJ matrices */
2413: static PetscErrorCode MatGetFactor_aij_mumps(Mat A,MatFactorType ftype,Mat *F)
2414: {
2415: Mat B;
2417: Mat_MUMPS *mumps;
2418: PetscBool isSeqAIJ;
2421: /* Create the factorization matrix */
2422: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
2423: MatCreate(PetscObjectComm((PetscObject)A),&B);
2424: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
2425: MatSetType(B,((PetscObject)A)->type_name);
2426: if (isSeqAIJ) {
2427: MatSeqAIJSetPreallocation(B,0,NULL);
2428: } else {
2429: MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);
2430: }
2432: PetscNewLog(B,&mumps);
2434: B->ops->view = MatView_MUMPS;
2435: B->ops->getinfo = MatGetInfo_MUMPS;
2436: B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2438: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);
2439: PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);
2440: PetscObjectComposeFunction((PetscObject)B,"MatFactorInvertSchurComplement_C",MatFactorInvertSchurComplement_MUMPS);
2441: PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);
2442: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSchurComplement_C",MatFactorGetSchurComplement_MUMPS);
2443: PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplement_C",MatFactorSolveSchurComplement_MUMPS);
2444: PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplementTranspose_C",MatFactorSolveSchurComplementTranspose_MUMPS);
2445: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);
2446: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);
2447: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);
2448: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);
2449: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);
2450: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);
2451: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);
2452: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);
2454: if (ftype == MAT_FACTOR_LU) {
2455: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
2456: B->factortype = MAT_FACTOR_LU;
2457: if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqaij;
2458: else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpiaij;
2459: mumps->sym = 0;
2460: } else {
2461: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2462: B->factortype = MAT_FACTOR_CHOLESKY;
2463: if (isSeqAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqaij_seqsbaij;
2464: else mumps->ConvertToTriples = MatConvertToTriples_mpiaij_mpisbaij;
2465: #if defined(PETSC_USE_COMPLEX)
2466: mumps->sym = 2;
2467: #else
2468: if (A->spd_set && A->spd) mumps->sym = 1;
2469: else mumps->sym = 2;
2470: #endif
2471: }
2473: /* set solvertype */
2474: PetscFree(B->solvertype);
2475: PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);
2477: mumps->isAIJ = PETSC_TRUE;
2478: mumps->Destroy = B->ops->destroy;
2479: B->ops->destroy = MatDestroy_MUMPS;
2480: B->spptr = (void*)mumps;
2482: PetscInitializeMUMPS(A,mumps);
2484: *F = B;
2485: return(0);
2486: }
2488: /* MatGetFactor for Seq and MPI SBAIJ matrices */
2491: static PetscErrorCode MatGetFactor_sbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
2492: {
2493: Mat B;
2495: Mat_MUMPS *mumps;
2496: PetscBool isSeqSBAIJ;
2499: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
2500: if (A->rmap->bs > 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with block size > 1 with MUMPS Cholesky, use AIJ matrix instead");
2501: PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
2502: /* Create the factorization matrix */
2503: MatCreate(PetscObjectComm((PetscObject)A),&B);
2504: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
2505: MatSetType(B,((PetscObject)A)->type_name);
2506: PetscNewLog(B,&mumps);
2507: if (isSeqSBAIJ) {
2508: MatSeqSBAIJSetPreallocation(B,1,0,NULL);
2510: mumps->ConvertToTriples = MatConvertToTriples_seqsbaij_seqsbaij;
2511: } else {
2512: MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);
2514: mumps->ConvertToTriples = MatConvertToTriples_mpisbaij_mpisbaij;
2515: }
2517: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MUMPS;
2518: B->ops->view = MatView_MUMPS;
2519: B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2521: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);
2522: PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);
2523: PetscObjectComposeFunction((PetscObject)B,"MatFactorInvertSchurComplement_C",MatFactorInvertSchurComplement_MUMPS);
2524: PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);
2525: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSchurComplement_C",MatFactorGetSchurComplement_MUMPS);
2526: PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplement_C",MatFactorSolveSchurComplement_MUMPS);
2527: PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplementTranspose_C",MatFactorSolveSchurComplementTranspose_MUMPS);
2528: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);
2529: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);
2530: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);
2531: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);
2532: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);
2533: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);
2534: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);
2535: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);
2537: B->factortype = MAT_FACTOR_CHOLESKY;
2538: #if defined(PETSC_USE_COMPLEX)
2539: mumps->sym = 2;
2540: #else
2541: if (A->spd_set && A->spd) mumps->sym = 1;
2542: else mumps->sym = 2;
2543: #endif
2545: /* set solvertype */
2546: PetscFree(B->solvertype);
2547: PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);
2549: mumps->isAIJ = PETSC_FALSE;
2550: mumps->Destroy = B->ops->destroy;
2551: B->ops->destroy = MatDestroy_MUMPS;
2552: B->spptr = (void*)mumps;
2554: PetscInitializeMUMPS(A,mumps);
2556: *F = B;
2557: return(0);
2558: }
2562: static PetscErrorCode MatGetFactor_baij_mumps(Mat A,MatFactorType ftype,Mat *F)
2563: {
2564: Mat B;
2566: Mat_MUMPS *mumps;
2567: PetscBool isSeqBAIJ;
2570: /* Create the factorization matrix */
2571: PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);
2572: MatCreate(PetscObjectComm((PetscObject)A),&B);
2573: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
2574: MatSetType(B,((PetscObject)A)->type_name);
2575: if (isSeqBAIJ) {
2576: MatSeqBAIJSetPreallocation(B,A->rmap->bs,0,NULL);
2577: } else {
2578: MatMPIBAIJSetPreallocation(B,A->rmap->bs,0,NULL,0,NULL);
2579: }
2581: PetscNewLog(B,&mumps);
2582: if (ftype == MAT_FACTOR_LU) {
2583: B->ops->lufactorsymbolic = MatLUFactorSymbolic_BAIJMUMPS;
2584: B->factortype = MAT_FACTOR_LU;
2585: if (isSeqBAIJ) mumps->ConvertToTriples = MatConvertToTriples_seqbaij_seqaij;
2586: else mumps->ConvertToTriples = MatConvertToTriples_mpibaij_mpiaij;
2587: mumps->sym = 0;
2588: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc BAIJ matrices with MUMPS Cholesky, use SBAIJ or AIJ matrix instead\n");
2590: B->ops->view = MatView_MUMPS;
2591: B->ops->getdiagonal = MatGetDiagonal_MUMPS;
2593: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_mumps);
2594: PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MUMPS);
2595: PetscObjectComposeFunction((PetscObject)B,"MatFactorInvertSchurComplement_C",MatFactorInvertSchurComplement_MUMPS);
2596: PetscObjectComposeFunction((PetscObject)B,"MatFactorCreateSchurComplement_C",MatFactorCreateSchurComplement_MUMPS);
2597: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSchurComplement_C",MatFactorGetSchurComplement_MUMPS);
2598: PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplement_C",MatFactorSolveSchurComplement_MUMPS);
2599: PetscObjectComposeFunction((PetscObject)B,"MatFactorSolveSchurComplementTranspose_C",MatFactorSolveSchurComplementTranspose_MUMPS);
2600: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetIcntl_C",MatMumpsSetIcntl_MUMPS);
2601: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetIcntl_C",MatMumpsGetIcntl_MUMPS);
2602: PetscObjectComposeFunction((PetscObject)B,"MatMumpsSetCntl_C",MatMumpsSetCntl_MUMPS);
2603: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetCntl_C",MatMumpsGetCntl_MUMPS);
2604: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfo_C",MatMumpsGetInfo_MUMPS);
2605: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetInfog_C",MatMumpsGetInfog_MUMPS);
2606: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfo_C",MatMumpsGetRinfo_MUMPS);
2607: PetscObjectComposeFunction((PetscObject)B,"MatMumpsGetRinfog_C",MatMumpsGetRinfog_MUMPS);
2609: /* set solvertype */
2610: PetscFree(B->solvertype);
2611: PetscStrallocpy(MATSOLVERMUMPS,&B->solvertype);
2613: mumps->isAIJ = PETSC_TRUE;
2614: mumps->Destroy = B->ops->destroy;
2615: B->ops->destroy = MatDestroy_MUMPS;
2616: B->spptr = (void*)mumps;
2618: PetscInitializeMUMPS(A,mumps);
2620: *F = B;
2621: return(0);
2622: }
2626: PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_MUMPS(void)
2627: {
2631: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);
2632: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);
2633: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);
2634: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPIBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);
2635: MatSolverPackageRegister(MATSOLVERMUMPS,MATMPISBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);
2636: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mumps);
2637: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mumps);
2638: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_baij_mumps);
2639: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_baij_mumps);
2640: MatSolverPackageRegister(MATSOLVERMUMPS,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_sbaij_mumps);
2641: return(0);
2642: }