Actual source code: matelem.cxx
petsc-3.7.3 2016-08-01
1: #include <../src/mat/impls/elemental/matelemimpl.h> /*I "petscmat.h" I*/
3: /*
4: The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
5: is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
6: */
7: static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
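/*
   For reference, a sketch of the get-or-create pattern built on this keyval; the full
   version lives in MatCreate_Elemental() and MatDestroy_Elemental() below (icomm is a
   PETSc-duplicated communicator):

     Mat_Elemental_Grid *commgrid;
     PetscBool          flg;
     MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
     if (!flg) {
       // first Elemental matrix on this communicator: create an El::Grid and cache it
       MPI_Attr_put(icomm,Petsc_Elemental_keyval,(void*)commgrid);
     } else commgrid->grid_refct++;  // reuse the cached grid
*/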
11: /*@C
12: PetscElementalInitializePackage - Initialize Elemental package
14: Logically Collective
16: Level: developer
18: .seealso: MATELEMENTAL, PetscElementalFinalizePackage()
19: @*/
20: PetscErrorCode PetscElementalInitializePackage(void)
21: {
25: if (El::Initialized()) return(0);
26: El::Initialize(); /* this package-initialization routine is invoked by the first call to MatCreate_Elemental() */
27: PetscRegisterFinalize(PetscElementalFinalizePackage);
28: return(0);
29: }
33: /*@C
34: PetscElementalFinalizePackage - Finalize Elemental package
36: Logically Collective
38: Level: developer
40: .seealso: MATELEMENTAL, PetscElementalInitializePackage()
41: @*/
42: PetscErrorCode PetscElementalFinalizePackage(void)
43: {
45: El::Finalize(); /* called by PetscFinalize() */
46: return(0);
47: }
51: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
52: {
54: Mat_Elemental *a = (Mat_Elemental*)A->data;
55: PetscBool iascii;
58: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
59: if (iascii) {
60: PetscViewerFormat format;
61: PetscViewerGetFormat(viewer,&format);
62: if (format == PETSC_VIEWER_ASCII_INFO) {
63: /* call elemental viewing function */
64: PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
65: PetscViewerASCIIPrintf(viewer," allocated entries=%d\n",(*a->emat).AllocatedMemory());
66: PetscViewerASCIIPrintf(viewer," grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
67: } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
68: /* no Elemental-specific information to report for a factored matrix yet */
72: } else if (format == PETSC_VIEWER_DEFAULT) {
73: PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
74: El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
75: PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
76: if (A->factortype == MAT_FACTOR_NONE){
77: Mat Adense;
78: PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
79: MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
80: MatView(Adense,viewer);
81: MatDestroy(&Adense);
82: }
83: } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Unsupported viewer format");
84: } else {
85: /* convert to dense format and call MatView() */
86: Mat Adense;
87: PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
88: MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
89: MatView(Adense,viewer);
90: MatDestroy(&Adense);
91: }
92: return(0);
93: }
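/*
   Usage sketch (hypothetical caller): the PETSC_VIEWER_ASCII_INFO branch above can be
   reached by pushing that format before viewing:

     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO);
     MatView(A,PETSC_VIEWER_STDOUT_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
*/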
97: static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
98: {
99: Mat_Elemental *a = (Mat_Elemental*)A->data;
102: info->block_size = 1.0;
104: if (flag == MAT_LOCAL) {
105: info->nz_allocated = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
106: info->nz_used = info->nz_allocated;
107: } else if (flag == MAT_GLOBAL_MAX) {
108: /* not implemented yet; see MatGetInfo_MPIAIJ() for how to reduce the local nz_allocated to a global maximum */
111: } else if (flag == MAT_GLOBAL_SUM) {
112: /* no global reduction implemented yet; report the locally allocated count as an approximation */
113: info->nz_allocated = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
114: info->nz_used = info->nz_allocated; /* assume Elemental does accurate allocation */
117: }
119: info->nz_unneeded = 0.0;
120: info->assemblies = (double)A->num_ass;
121: info->mallocs = 0;
122: info->memory = ((PetscObject)A)->mem;
123: info->fill_ratio_given = 0; /* determined by Elemental */
124: info->fill_ratio_needed = 0;
125: info->factor_mallocs = 0;
126: return(0);
127: }
131: PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
132: {
133: Mat_Elemental *a = (Mat_Elemental*)A->data;
136: switch (op) {
137: case MAT_NEW_NONZERO_LOCATIONS:
138: case MAT_NEW_NONZERO_LOCATION_ERR:
139: case MAT_NEW_NONZERO_ALLOCATION_ERR:
140: case MAT_SYMMETRIC:
141: break;
142: case MAT_ROW_ORIENTED:
143: a->roworiented = flg;
144: break;
145: default:
146: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
147: }
148: return(0);
149: }
153: static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
154: {
155: Mat_Elemental *a = (Mat_Elemental*)A->data;
156: PetscInt i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;
159: // TODO: Initialize matrix to all zeros?
161: // Count the number of queues from this process
162: if (a->roworiented) {
163: for (i=0; i<nr; i++) {
164: if (rows[i] < 0) continue;
165: P2RO(A,0,rows[i],&rrank,&ridx);
166: RO2E(A,0,rrank,ridx,&erow);
167: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
168: for (j=0; j<nc; j++) {
169: if (cols[j] < 0) continue;
170: P2RO(A,1,cols[j],&crank,&cidx);
171: RO2E(A,1,crank,cidx,&ecol);
172: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
173: if (!a->emat->IsLocal(erow,ecol) ){ /* off-process entry: count it so Reserve() can size the queue */
175: if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
176: ++numQueues;
177: continue;
178: }
180: switch (imode) {
181: case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
182: case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
183: default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
184: }
185: }
186: }
189: a->emat->Reserve( numQueues );
190: for (i=0; i<nr; i++) {
191: if (rows[i] < 0) continue;
192: P2RO(A,0,rows[i],&rrank,&ridx);
193: RO2E(A,0,rrank,ridx,&erow);
194: for (j=0; j<nc; j++) {
195: if (cols[j] < 0) continue;
196: P2RO(A,1,cols[j],&crank,&cidx);
197: RO2E(A,1,crank,cidx,&ecol);
198: if ( !a->emat->IsLocal(erow,ecol) ) { /* off-process entry: queue the update for ProcessQueues() */
200: a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
201: }
202: }
203: }
204: } else { /* column-oriented */
205: for (j=0; j<nc; j++) {
206: if (cols[j] < 0) continue;
207: P2RO(A,1,cols[j],&crank,&cidx);
208: RO2E(A,1,crank,cidx,&ecol);
209: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
210: for (i=0; i<nr; i++) {
211: if (rows[i] < 0) continue;
212: P2RO(A,0,rows[i],&rrank,&ridx);
213: RO2E(A,0,rrank,ridx,&erow);
214: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
215: if (!a->emat->IsLocal(erow,ecol) ){ /* off-process entry: count it so Reserve() can size the queue */
217: if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
218: ++numQueues;
219: continue;
220: }
222: switch (imode) {
223: case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
224: case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
225: default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
226: }
227: }
228: }
231: a->emat->Reserve( numQueues );
232: for (j=0; j<nc; j++) {
233: if (cols[j] < 0) continue;
234: P2RO(A,1,cols[j],&crank,&cidx);
235: RO2E(A,1,crank,cidx,&ecol);
237: for (i=0; i<nr; i++) {
238: if (rows[i] < 0) continue;
239: P2RO(A,0,rows[i],&rrank,&ridx);
240: RO2E(A,0,rrank,ridx,&erow);
241: if ( !a->emat->IsLocal(erow,ecol) ) { /* off-process entry: queue the update for ProcessQueues() */
243: a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
244: }
245: }
246: }
247: }
248: return(0);
249: }
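/*
   Usage sketch (hypothetical caller): ADD_VALUES works for any entry, while INSERT_VALUES
   is accepted only for locally owned entries, as enforced above; assembly triggers
   ProcessQueues() for the queued off-process updates.

     MatSetValues(A,1,&i,1,&j,&v,ADD_VALUES);
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
*/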
253: static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
254: {
255: Mat_Elemental *a = (Mat_Elemental*)A->data;
256: PetscErrorCode ierr;
257: const PetscElemScalar *x;
258: PetscElemScalar *y;
259: PetscElemScalar one = 1,zero = 0;
262: VecGetArrayRead(X,(const PetscScalar **)&x);
263: VecGetArray(Y,(PetscScalar **)&y);
264: { /* scoped so the El::DistMatrix views are destroyed before the arrays are restored */
265: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
266: xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
267: ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
268: El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
269: }
270: VecRestoreArrayRead(X,(const PetscScalar **)&x);
271: VecRestoreArray(Y,(PetscScalar **)&y);
272: return(0);
273: }
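/*
   Note on the vector kernels above and below: the raw PETSc arrays are wrapped as
   El::DistMatrix<.,VC,STAR> views, whose entries cycle round-robin over the ranks, so the
   wrapped vectors follow Elemental's [VC,STAR] ordering rather than PETSc's contiguous-block
   layout; the matrix entries were placed through the matching P2RO()/RO2E() index
   translation, which keeps the products consistent.
*/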
277: static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
278: {
279: Mat_Elemental *a = (Mat_Elemental*)A->data;
280: PetscErrorCode ierr;
281: const PetscElemScalar *x;
282: PetscElemScalar *y;
283: PetscElemScalar one = 1,zero = 0;
286: VecGetArrayRead(X,(const PetscScalar **)&x);
287: VecGetArray(Y,(PetscScalar **)&y);
288: { /* scoped so the El::DistMatrix views are destroyed before the arrays are restored */
289: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
290: xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
291: ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
292: El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
293: }
294: VecRestoreArrayRead(X,(const PetscScalar **)&x);
295: VecRestoreArray(Y,(PetscScalar **)&y);
296: return(0);
297: }
301: static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
302: {
303: Mat_Elemental *a = (Mat_Elemental*)A->data;
304: PetscErrorCode ierr;
305: const PetscElemScalar *x;
306: PetscElemScalar *z;
307: PetscElemScalar one = 1;
310: if (Y != Z) {VecCopy(Y,Z);}
311: VecGetArrayRead(X,(const PetscScalar **)&x);
312: VecGetArray(Z,(PetscScalar **)&z);
313: { /* scoped so the El::DistMatrix views are destroyed before the arrays are restored */
314: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
315: xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
316: ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
317: El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
318: }
319: VecRestoreArrayRead(X,(const PetscScalar **)&x);
320: VecRestoreArray(Z,(PetscScalar **)&z);
321: return(0);
322: }
326: static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
327: {
328: Mat_Elemental *a = (Mat_Elemental*)A->data;
329: PetscErrorCode ierr;
330: const PetscElemScalar *x;
331: PetscElemScalar *z;
332: PetscElemScalar one = 1;
335: if (Y != Z) {VecCopy(Y,Z);}
336: VecGetArrayRead(X,(const PetscScalar **)&x);
337: VecGetArray(Z,(PetscScalar **)&z);
338: { /* scoped so the El::DistMatrix views are destroyed before the arrays are restored */
339: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
340: xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
341: ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
342: El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
343: }
344: VecRestoreArrayRead(X,(const PetscScalar **)&x);
345: VecRestoreArray(Z,(PetscScalar **)&z);
346: return(0);
347: }
351: static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
352: {
353: Mat_Elemental *a = (Mat_Elemental*)A->data;
354: Mat_Elemental *b = (Mat_Elemental*)B->data;
355: Mat_Elemental *c = (Mat_Elemental*)C->data;
356: PetscElemScalar one = 1,zero = 0;
360: El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
362: C->assembled = PETSC_TRUE;
363: return(0);
364: }
368: static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
369: {
371: Mat Ce;
372: MPI_Comm comm;
375: PetscObjectGetComm((PetscObject)A,&comm);
376: MatCreate(comm,&Ce);
377: MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
378: MatSetType(Ce,MATELEMENTAL);
379: MatSetUp(Ce);
380: *C = Ce;
381: return(0);
382: }
386: static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
387: {
391: if (scall == MAT_INITIAL_MATRIX){
392: PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
393: MatMatMultSymbolic_Elemental(A,B,1.0,C);
394: PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
395: }
396: PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
397: MatMatMultNumeric_Elemental(A,B,*C);
398: PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
399: return(0);
400: }
404: static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
405: {
406: Mat_Elemental *a = (Mat_Elemental*)A->data;
407: Mat_Elemental *b = (Mat_Elemental*)B->data;
408: Mat_Elemental *c = (Mat_Elemental*)C->data;
409: PetscElemScalar one = 1,zero = 0;
413: El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
415: C->assembled = PETSC_TRUE;
416: return(0);
417: }
421: static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
422: {
424: Mat Ce;
425: MPI_Comm comm;
428: PetscObjectGetComm((PetscObject)A,&comm);
429: MatCreate(comm,&Ce);
430: MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
431: MatSetType(Ce,MATELEMENTAL);
432: MatSetUp(Ce);
433: *C = Ce;
434: return(0);
435: }
439: static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
440: {
444: if (scall == MAT_INITIAL_MATRIX){
445: PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);
446: MatMatTransposeMultSymbolic_Elemental(A,B,1.0,C);
447: PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);
448: }
449: PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);
450: MatMatTransposeMultNumeric_Elemental(A,B,*C);
451: PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);
452: return(0);
453: }
457: static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
458: {
459: PetscInt i,nrows,ncols,nD,rrank,ridx,crank,cidx;
460: Mat_Elemental *a = (Mat_Elemental*)A->data;
461: PetscErrorCode ierr;
462: PetscElemScalar v;
463: MPI_Comm comm;
466: PetscObjectGetComm((PetscObject)A,&comm);
467: MatGetSize(A,&nrows,&ncols);
468: nD = nrows>ncols ? ncols : nrows;
469: for (i=0; i<nD; i++) {
470: PetscInt erow,ecol;
471: P2RO(A,0,i,&rrank,&ridx);
472: RO2E(A,0,rrank,ridx,&erow);
473: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
474: P2RO(A,1,i,&crank,&cidx);
475: RO2E(A,1,crank,cidx,&ecol);
476: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
477: v = a->emat->Get(erow,ecol);
478: VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
479: }
480: VecAssemblyBegin(D);
481: VecAssemblyEnd(D);
482: return(0);
483: }
487: static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
488: {
489: Mat_Elemental *x = (Mat_Elemental*)X->data;
490: const PetscElemScalar *d;
491: PetscErrorCode ierr;
494: if (R) {
495: VecGetArrayRead(R,(const PetscScalar **)&d);
496: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
497: de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
498: El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
499: VecRestoreArrayRead(R,(const PetscScalar **)&d);
500: }
501: if (L) {
502: VecGetArrayRead(L,(const PetscScalar **)&d);
503: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
504: de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
505: El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
506: VecRestoreArrayRead(L,(const PetscScalar **)&d);
507: }
508: return(0);
509: }
513: static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
514: {
515: Mat_Elemental *x = (Mat_Elemental*)X->data;
518: El::Scale((PetscElemScalar)a,*x->emat);
519: return(0);
520: }
522: /*
523: MatAXPY - Computes Y = a*X + Y.
524: */
527: static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
528: {
529: Mat_Elemental *x = (Mat_Elemental*)X->data;
530: Mat_Elemental *y = (Mat_Elemental*)Y->data;
534: El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
535: PetscObjectStateIncrease((PetscObject)Y);
536: return(0);
537: }
541: static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
542: {
543: Mat_Elemental *a=(Mat_Elemental*)A->data;
544: Mat_Elemental *b=(Mat_Elemental*)B->data;
547: El::Copy(*a->emat,*b->emat);
548: return(0);
549: }
553: static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
554: {
555: Mat Be;
556: MPI_Comm comm;
557: Mat_Elemental *a=(Mat_Elemental*)A->data;
561: PetscObjectGetComm((PetscObject)A,&comm);
562: MatCreate(comm,&Be);
563: MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
564: MatSetType(Be,MATELEMENTAL);
565: MatSetUp(Be);
566: *B = Be;
567: if (op == MAT_COPY_VALUES) {
568: Mat_Elemental *b=(Mat_Elemental*)Be->data;
569: El::Copy(*a->emat,*b->emat);
570: }
571: Be->assembled = PETSC_TRUE;
572: return(0);
573: }
577: static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
578: {
579: Mat Be = *B;
581: MPI_Comm comm;
582: Mat_Elemental *a = (Mat_Elemental*)A->data, *b;
585: PetscObjectGetComm((PetscObject)A,&comm);
586: /* Only out-of-place supported */
587: if (reuse == MAT_INITIAL_MATRIX){
588: MatCreate(comm,&Be);
589: MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
590: MatSetType(Be,MATELEMENTAL);
591: MatSetUp(Be);
592: *B = Be;
593: }
594: b = (Mat_Elemental*)Be->data;
595: El::Transpose(*a->emat,*b->emat);
596: Be->assembled = PETSC_TRUE;
597: return(0);
598: }
602: static PetscErrorCode MatConjugate_Elemental(Mat A)
603: {
604: Mat_Elemental *a = (Mat_Elemental*)A->data;
607: El::Conjugate(*a->emat);
608: return(0);
609: }
613: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
614: {
615: Mat Be = *B;
617: MPI_Comm comm;
618: Mat_Elemental *a = (Mat_Elemental*)A->data, *b;
621: PetscObjectGetComm((PetscObject)A,&comm);
622: /* Only out-of-place supported */
623: if (reuse == MAT_INITIAL_MATRIX){
624: MatCreate(comm,&Be);
625: MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
626: MatSetType(Be,MATELEMENTAL);
627: MatSetUp(Be);
628: *B = Be;
629: }
630: b = (Mat_Elemental*)Be->data;
631: El::Adjoint(*a->emat,*b->emat);
632: Be->assembled = PETSC_TRUE;
633: return(0);
634: }
638: static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
639: {
640: Mat_Elemental *a = (Mat_Elemental*)A->data;
641: PetscErrorCode ierr;
642: PetscElemScalar *x;
645: VecCopy(B,X);
646: VecGetArray(X,(PetscScalar **)&x);
647: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
648: xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
649: El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
650: switch (A->factortype) {
651: case MAT_FACTOR_LU:
652: if ((*a->pivot).AllocatedMemory()) {
653: El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,xer);
654: El::Copy(xer,xe);
655: } else {
656: El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
657: El::Copy(xer,xe);
658: }
659: break;
660: case MAT_FACTOR_CHOLESKY:
661: El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
662: El::Copy(xer,xe);
663: break;
664: default:
665: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
666: break;
667: }
668: VecRestoreArray(X,(PetscScalar **)&x);
669: return(0);
670: }
674: static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
675: {
676: PetscErrorCode ierr;
679: MatSolve_Elemental(A,B,X);
680: VecAXPY(X,1,Y);
681: return(0);
682: }
686: static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
687: {
688: Mat_Elemental *a=(Mat_Elemental*)A->data;
689: Mat_Elemental *b=(Mat_Elemental*)B->data;
690: Mat_Elemental *x=(Mat_Elemental*)X->data;
693: El::Copy(*b->emat,*x->emat);
694: switch (A->factortype) {
695: case MAT_FACTOR_LU:
696: if ((*a->pivot).AllocatedMemory()) {
697: El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,*x->emat);
698: } else {
699: El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
700: }
701: break;
702: case MAT_FACTOR_CHOLESKY:
703: El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
704: break;
705: default:
706: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
707: break;
708: }
709: return(0);
710: }
714: static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
715: {
716: Mat_Elemental *a = (Mat_Elemental*)A->data;
720: if (info->dtcol){
721: El::LU(*a->emat,*a->pivot);
722: } else {
723: El::LU(*a->emat);
724: }
725: A->factortype = MAT_FACTOR_LU;
726: A->assembled = PETSC_TRUE;
728: PetscFree(A->solvertype);
729: PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
730: return(0);
731: }
735: static PetscErrorCode MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
736: {
740: MatCopy(A,F,SAME_NONZERO_PATTERN);
741: MatLUFactor_Elemental(F,0,0,info);
742: return(0);
743: }
747: static PetscErrorCode MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
748: {
750: /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing to do here. */
751: return(0);
752: }
756: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
757: {
758: Mat_Elemental *a = (Mat_Elemental*)A->data;
759: El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
763: El::Cholesky(El::UPPER,*a->emat);
764: A->factortype = MAT_FACTOR_CHOLESKY;
765: A->assembled = PETSC_TRUE;
767: PetscFree(A->solvertype);
768: PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
769: return(0);
770: }
774: static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
775: {
779: MatCopy(A,F,SAME_NONZERO_PATTERN);
780: MatCholeskyFactor_Elemental(F,0,info);
781: return(0);
782: }
786: static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
787: {
790: /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing to do here. */
790: return(0);
791: }
795: PetscErrorCode MatFactorGetSolverPackage_elemental_elemental(Mat A,const MatSolverPackage *type)
796: {
798: *type = MATSOLVERELEMENTAL;
799: return(0);
800: }
804: static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
805: {
806: Mat B;
810: /* Create the factorization matrix */
811: MatCreate(PetscObjectComm((PetscObject)A),&B);
812: MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
813: MatSetType(B,MATELEMENTAL);
814: MatSetUp(B);
815: B->factortype = ftype;
816: PetscFree(B->solvertype);
817: PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);
819: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_elemental_elemental);
820: *F = B;
821: return(0);
822: }
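/*
   Typical factored-solve sequence (a sketch, assuming vectors b and x compatible with A):

     Mat           F;
     IS            isr,isc;
     MatFactorInfo info;
     MatFactorInfoInitialize(&info);
     MatGetOrdering(A,MATORDERINGNATURAL,&isr,&isc);
     MatGetFactor(A,MATSOLVERELEMENTAL,MAT_FACTOR_LU,&F);
     MatLUFactorSymbolic(F,A,isr,isc,&info);  // no-op for this solver: F is fully set up by MatGetFactor()
     MatLUFactorNumeric(F,A,&info);           // copies A into F and runs El::LU()
     MatSolve(F,b,x);
*/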
826: PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Elemental(void)
827: {
831: MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL, MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
832: MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL, MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
833: return(0);
834: }
838: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
839: {
840: Mat_Elemental *a=(Mat_Elemental*)A->data;
843: switch (type){
844: case NORM_1:
845: *nrm = El::OneNorm(*a->emat);
846: break;
847: case NORM_FROBENIUS:
848: *nrm = El::FrobeniusNorm(*a->emat);
849: break;
850: case NORM_INFINITY:
851: *nrm = El::InfinityNorm(*a->emat);
852: break;
853: default:
854: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
855: }
856: return(0);
857: }
861: static PetscErrorCode MatZeroEntries_Elemental(Mat A)
862: {
863: Mat_Elemental *a=(Mat_Elemental*)A->data;
866: El::Zero(*a->emat);
867: return(0);
868: }
872: static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
873: {
874: Mat_Elemental *a = (Mat_Elemental*)A->data;
876: PetscInt i,m,shift,stride,*idx;
879: if (rows) {
880: m = a->emat->LocalHeight();
881: shift = a->emat->ColShift();
882: stride = a->emat->ColStride();
883: PetscMalloc1(m,&idx);
884: for (i=0; i<m; i++) {
885: PetscInt rank,offset;
886: E2RO(A,0,shift+i*stride,&rank,&offset);
887: RO2P(A,0,rank,offset,&idx[i]);
888: }
889: ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
890: }
891: if (cols) {
892: m = a->emat->LocalWidth();
893: shift = a->emat->RowShift();
894: stride = a->emat->RowStride();
895: PetscMalloc1(m,&idx);
896: for (i=0; i<m; i++) {
897: PetscInt rank,offset;
898: E2RO(A,1,shift+i*stride,&rank,&offset);
899: RO2P(A,1,rank,offset,&idx[i]);
900: }
901: ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
902: }
903: return(0);
904: }
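/*
   Usage sketch (hypothetical caller): list the global rows/columns whose entries are
   stored on this process under the Elemental distribution.

     IS rows,cols;
     MatGetOwnershipIS(A,&rows,&cols);
     ISView(rows,PETSC_VIEWER_STDOUT_SELF);
     ISDestroy(&rows);
     ISDestroy(&cols);
*/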
908: static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
909: {
910: Mat Bmpi;
911: Mat_Elemental *a = (Mat_Elemental*)A->data;
912: MPI_Comm comm;
913: PetscErrorCode ierr;
914: IS isrows,iscols;
915: PetscInt rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
916: const PetscInt *rows,*cols;
917: PetscElemScalar v;
918: const El::Grid &grid = a->emat->Grid();
921: PetscObjectGetComm((PetscObject)A,&comm);
922:
923: if (reuse == MAT_REUSE_MATRIX) {
924: Bmpi = *B;
925: } else {
926: MatCreate(comm,&Bmpi);
927: MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
928: MatSetType(Bmpi,MATDENSE);
929: MatSetUp(Bmpi);
930: }
932: /* Get local entries of A */
933: MatGetOwnershipIS(A,&isrows,&iscols);
934: ISGetLocalSize(isrows,&nrows);
935: ISGetIndices(isrows,&rows);
936: ISGetLocalSize(iscols,&ncols);
937: ISGetIndices(iscols,&cols);
939: if (a->roworiented) {
940: for (i=0; i<nrows; i++) {
941: P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
942: RO2E(A,0,rrank,ridx,&erow);
943: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
944: for (j=0; j<ncols; j++) {
945: P2RO(A,1,cols[j],&crank,&cidx);
946: RO2E(A,1,crank,cidx,&ecol);
947: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
949: elrow = erow / grid.MCSize(); /* Elemental local row index */
950: elcol = ecol / grid.MRSize(); /* Elemental local column index */
951: v = a->emat->GetLocal(elrow,elcol);
952: MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
953: }
954: }
955: } else { /* column-oriented */
956: for (j=0; j<ncols; j++) {
957: P2RO(A,1,cols[j],&crank,&cidx);
958: RO2E(A,1,crank,cidx,&ecol);
959: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
960: for (i=0; i<nrows; i++) {
961: P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
962: RO2E(A,0,rrank,ridx,&erow);
963: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
965: elrow = erow / grid.MCSize(); /* Elemental local row index */
966: elcol = ecol / grid.MRSize(); /* Elemental local column index */
967: v = a->emat->GetLocal(elrow,elcol);
968: MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
969: }
970: }
971: }
972: MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
973: MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
974: if (reuse == MAT_INPLACE_MATRIX) {
975: MatHeaderReplace(A,&Bmpi);
976: } else {
977: *B = Bmpi;
978: }
979: ISDestroy(&isrows);
980: ISDestroy(&iscols);
981: return(0);
982: }
986: PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
987: {
988: Mat mat_elemental;
989: PetscErrorCode ierr;
990: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols;
991: const PetscInt *cols;
992: const PetscScalar *vals;
995: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
996: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
997: MatSetType(mat_elemental,MATELEMENTAL);
998: MatSetUp(mat_elemental);
999: for (row=0; row<M; row++) {
1000: MatGetRow(A,row,&ncols,&cols,&vals);
1001: /* the PETSc-Elemental interface applies off-process entries additively, so only ADD_VALUES is allowed */
1002: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1003: MatRestoreRow(A,row,&ncols,&cols,&vals);
1004: }
1005: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1006: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1008: if (reuse == MAT_INPLACE_MATRIX) {
1009: MatHeaderReplace(A,&mat_elemental);
1010: } else {
1011: *newmat = mat_elemental;
1012: }
1013: return(0);
1014: }
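/*
   Usage sketch for the conversion routines in this file (hypothetical caller):

     Mat Ae;
     MatConvert(A,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae);  // A may be (MPI)AIJ or (MPI)SBAIJ
*/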
1018: PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1019: {
1020: Mat mat_elemental;
1021: PetscErrorCode ierr;
1022: PetscInt row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
1023: const PetscInt *cols;
1024: const PetscScalar *vals;
1027: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1028: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
1029: MatSetType(mat_elemental,MATELEMENTAL);
1030: MatSetUp(mat_elemental);
1031: for (row=rstart; row<rend; row++) {
1032: MatGetRow(A,row,&ncols,&cols,&vals);
1033: for (j=0; j<ncols; j++) {
1034: /* the PETSc-Elemental interface applies off-process entries additively, so only ADD_VALUES is allowed */
1035: MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
1036: }
1037: MatRestoreRow(A,row,&ncols,&cols,&vals);
1038: }
1039: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1040: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1042: if (reuse == MAT_INPLACE_MATRIX) {
1043: MatHeaderReplace(A,&mat_elemental);
1044: } else {
1045: *newmat = mat_elemental;
1046: }
1047: return(0);
1048: }
1052: PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1053: {
1054: Mat mat_elemental;
1055: PetscErrorCode ierr;
1056: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols,j;
1057: const PetscInt *cols;
1058: const PetscScalar *vals;
1061: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1062: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1063: MatSetType(mat_elemental,MATELEMENTAL);
1064: MatSetUp(mat_elemental);
1065: MatGetRowUpperTriangular(A);
1066: for (row=0; row<M; row++) {
1067: MatGetRow(A,row,&ncols,&cols,&vals);
1068: /* the PETSc-Elemental interface applies off-process entries additively, so only ADD_VALUES is allowed */
1069: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1070: for (j=0; j<ncols; j++) { /* lower triangular part */
1071: if (cols[j] == row) continue;
1072: MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
1073: }
1074: MatRestoreRow(A,row,&ncols,&cols,&vals);
1075: }
1076: MatRestoreRowUpperTriangular(A);
1077: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1078: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1080: if (reuse == MAT_INPLACE_MATRIX) {
1081: MatHeaderReplace(A,&mat_elemental);
1082: } else {
1083: *newmat = mat_elemental;
1084: }
1085: return(0);
1086: }
1090: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1091: {
1092: Mat mat_elemental;
1093: PetscErrorCode ierr;
1094: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1095: const PetscInt *cols;
1096: const PetscScalar *vals;
1099: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1100: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1101: MatSetType(mat_elemental,MATELEMENTAL);
1102: MatSetUp(mat_elemental);
1103: MatGetRowUpperTriangular(A);
1104: for (row=rstart; row<rend; row++) {
1105: MatGetRow(A,row,&ncols,&cols,&vals);
1106: /* the PETSc-Elemental interface applies off-process entries additively, so only ADD_VALUES is allowed */
1107: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1108: for (j=0; j<ncols; j++) { /* lower triangular part */
1109: if (cols[j] == row) continue;
1110: MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
1111: }
1112: MatRestoreRow(A,row,&ncols,&cols,&vals);
1113: }
1114: MatRestoreRowUpperTriangular(A);
1115: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1116: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1118: if (reuse == MAT_INPLACE_MATRIX) {
1119: MatHeaderReplace(A,&mat_elemental);
1120: } else {
1121: *newmat = mat_elemental;
1122: }
1123: return(0);
1124: }
1128: static PetscErrorCode MatDestroy_Elemental(Mat A)
1129: {
1130: Mat_Elemental *a = (Mat_Elemental*)A->data;
1131: PetscErrorCode ierr;
1132: Mat_Elemental_Grid *commgrid;
1133: PetscBool flg;
1134: MPI_Comm icomm;
1137: delete a->emat;
1138: delete a->pivot;
1140: El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1141: PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1142: MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1143: if (--commgrid->grid_refct == 0) {
1144: delete commgrid->grid;
1145: PetscFree(commgrid);
1146: MPI_Keyval_free(&Petsc_Elemental_keyval);
1147: }
1148: PetscCommDestroy(&icomm);
1149: PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
1150: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
1151: PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",NULL);
1152: PetscFree(A->data);
1153: return(0);
1154: }
1158: PetscErrorCode MatSetUp_Elemental(Mat A)
1159: {
1160: Mat_Elemental *a = (Mat_Elemental*)A->data;
1162: PetscMPIInt rsize,csize;
1165: PetscLayoutSetUp(A->rmap);
1166: PetscLayoutSetUp(A->cmap);
1168: a->emat->Resize(A->rmap->N,A->cmap->N);
1169: El::Zero(*a->emat);
1171: MPI_Comm_size(A->rmap->comm,&rsize);
1172: MPI_Comm_size(A->cmap->comm,&csize);
1173: if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1174: a->commsize = rsize;
1175: a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize; /* number of ranks that own the larger share of rows */
1176: a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1177: a->m[0] = A->rmap->N / rsize + (a->mr[0] != rsize); /* m = ceil(N/size); the first mr ranks own m indices each, any remaining ranks own m-1 */
1178: a->m[1] = A->cmap->N / csize + (a->mr[1] != csize);
1179: return(0);
1180: }
1184: PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1185: {
1186: Mat_Elemental *a = (Mat_Elemental*)A->data;
1190: a->emat->ProcessQueues(); /* flush all queued off-process updates */
1192: return(0);
1193: }
1197: PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
1198: {
1200: /* Currently does nothing */
1201: return(0);
1202: }
1206: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1207: {
1209: Mat Adense,Ae;
1210: MPI_Comm comm;
1213: PetscObjectGetComm((PetscObject)newMat,&comm);
1214: MatCreate(comm,&Adense);
1215: MatSetType(Adense,MATDENSE);
1216: MatLoad(Adense,viewer);
1217: MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1218: MatDestroy(&Adense);
1219: MatHeaderReplace(newMat,&Ae);
1220: return(0);
1221: }
1225: PetscErrorCode MatElementalHermitianGenDefEig_Elemental(El::Pencil eigtype,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
1226: {
1228: Mat_Elemental *a=(Mat_Elemental*)A->data,*b=(Mat_Elemental*)B->data,*x;
1229: MPI_Comm comm;
1230: Mat EVAL;
1231: Mat_Elemental *e;
1232:
1234: /* Compute eigenvalues and eigenvectors */
1235: El::DistMatrix<PetscElemScalar,El::VR,El::STAR> w( *a->grid ); /* holding eigenvalues */
1236: El::DistMatrix<PetscElemScalar> X( *a->grid ); /* holding eigenvectors */
1237: El::HermitianGenDefEig(eigtype,uplo,*a->emat,*b->emat,w,X,sort,subset,ctrl);
1240: /* Wrap w and X into PETSc MATELEMENTAL matrices */
1241: PetscObjectGetComm((PetscObject)A,&comm);
1242: MatCreate(comm,evec);
1243: MatSetSizes(*evec,PETSC_DECIDE,PETSC_DECIDE,X.Height(),X.Width());
1244: MatSetType(*evec,MATELEMENTAL);
1245: MatSetFromOptions(*evec);
1246: MatSetUp(*evec);
1247: MatAssemblyBegin(*evec,MAT_FINAL_ASSEMBLY);
1248: MatAssemblyEnd(*evec,MAT_FINAL_ASSEMBLY);
1250: x = (Mat_Elemental*)(*evec)->data;
1252: *x->emat = X; /* copy-assignment into the emat allocated by MatCreate_Elemental() */
1253:
1254: MatCreate(comm,&EVAL);
1255: MatSetSizes(EVAL,PETSC_DECIDE,PETSC_DECIDE,w.Height(),w.Width());
1256: MatSetType(EVAL,MATELEMENTAL);
1257: MatSetFromOptions(EVAL);
1258: MatSetUp(EVAL);
1259: MatAssemblyBegin(EVAL,MAT_FINAL_ASSEMBLY);
1260: MatAssemblyEnd(EVAL,MAT_FINAL_ASSEMBLY);
1261: e = (Mat_Elemental*)EVAL->data;
1262: *e->emat = w; /* copy-assignment (with redistribution from [VR,STAR]) into the preallocated emat */
1263: *evals = EVAL;
1265: #if defined(MV)
1266: /* Test correctness norm = || - A*X + B*X*w || */
1267: {
1268: PetscElemScalar alpha,beta;
1269: El::DistMatrix<PetscElemScalar> Y(*a->grid); //tmp matrix
1270: alpha = 1.0; beta=0.0;
1271: El::Gemm(El::NORMAL,El::NORMAL,alpha,*b->emat,X,beta,Y); //Y = B*X
1272: El::DiagonalScale(El::RIGHT,El::NORMAL, w, Y); //Y = Y*w
1273: alpha = -1.0; beta=1.0;
1274: El::Gemm(El::NORMAL,El::NORMAL,alpha,*a->emat,X,beta,Y); //Y = - A*X + B*X*w
1276: PetscElemScalar norm = El::FrobeniusNorm(Y);
1277: if ((*a->grid).Rank()==0) printf(" norm (- A*X + B*X*w) = %g\n",norm);
1278: }
1280: {
1281: PetscMPIInt rank;
1282: MPI_Comm_rank(comm,&rank);
1283: printf("w: [%d] [%d, %d %d] %d; X: %d %d\n",rank,w.DistRank(),w.ColRank(),w.RowRank(),w.LocalHeight(),X.LocalHeight(),X.LocalWidth());
1284: }
1285: #endif
1286: return(0);
1287: }
1291: /*@
1292: MatElementalHermitianGenDefEig - Computes the selected eigenvalues and eigenvectors of the Hermitian-definite matrix pencil (A,B), with the selection determined by the subset argument
1294: Logically Collective on Mat
1296: Level: beginner
1298: References:
1299: . Elemental Users' Guide
1301: @*/
1302: PetscErrorCode MatElementalHermitianGenDefEig(El::Pencil type,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
1303: {
1307: PetscUseMethod(A,"MatElementalHermitianGenDefEig_C",(El::Pencil,El::UpperOrLower,Mat,Mat,Mat*,Mat*,El::SortType,El::HermitianEigSubset<PetscElemScalar>,const El::HermitianEigCtrl<PetscElemScalar>),(type,uplo,A,B,evals,evec,sort,subset,ctrl));
1308: return(0);
1309: }
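/*
   Usage sketch (assumes A and B are assembled MATELEMENTAL matrices with B Hermitian
   positive-definite; default-constructed subset/ctrl request the full spectrum):

     Mat evals,evec;
     El::HermitianEigSubset<PetscElemScalar> subset;
     El::HermitianEigCtrl<PetscElemScalar>   ctrl;
     MatElementalHermitianGenDefEig(El::AXBX,El::LOWER,A,B,&evals,&evec,El::ASCENDING,subset,ctrl);
*/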
1311: /* -------------------------------------------------------------------*/
1312: static struct _MatOps MatOps_Values = {
1313: MatSetValues_Elemental,
1314: 0,
1315: 0,
1316: MatMult_Elemental,
1317: /* 4*/ MatMultAdd_Elemental,
1318: MatMultTranspose_Elemental,
1319: MatMultTransposeAdd_Elemental,
1320: MatSolve_Elemental,
1321: MatSolveAdd_Elemental,
1322: 0,
1323: /*10*/ 0,
1324: MatLUFactor_Elemental,
1325: MatCholeskyFactor_Elemental,
1326: 0,
1327: MatTranspose_Elemental,
1328: /*15*/ MatGetInfo_Elemental,
1329: 0,
1330: MatGetDiagonal_Elemental,
1331: MatDiagonalScale_Elemental,
1332: MatNorm_Elemental,
1333: /*20*/ MatAssemblyBegin_Elemental,
1334: MatAssemblyEnd_Elemental,
1335: MatSetOption_Elemental,
1336: MatZeroEntries_Elemental,
1337: /*24*/ 0,
1338: MatLUFactorSymbolic_Elemental,
1339: MatLUFactorNumeric_Elemental,
1340: MatCholeskyFactorSymbolic_Elemental,
1341: MatCholeskyFactorNumeric_Elemental,
1342: /*29*/ MatSetUp_Elemental,
1343: 0,
1344: 0,
1345: 0,
1346: 0,
1347: /*34*/ MatDuplicate_Elemental,
1348: 0,
1349: 0,
1350: 0,
1351: 0,
1352: /*39*/ MatAXPY_Elemental,
1353: 0,
1354: 0,
1355: 0,
1356: MatCopy_Elemental,
1357: /*44*/ 0,
1358: MatScale_Elemental,
1359: MatShift_Basic,
1360: 0,
1361: 0,
1362: /*49*/ 0,
1363: 0,
1364: 0,
1365: 0,
1366: 0,
1367: /*54*/ 0,
1368: 0,
1369: 0,
1370: 0,
1371: 0,
1372: /*59*/ 0,
1373: MatDestroy_Elemental,
1374: MatView_Elemental,
1375: 0,
1376: 0,
1377: /*64*/ 0,
1378: 0,
1379: 0,
1380: 0,
1381: 0,
1382: /*69*/ 0,
1383: 0,
1384: MatConvert_Elemental_Dense,
1385: 0,
1386: 0,
1387: /*74*/ 0,
1388: 0,
1389: 0,
1390: 0,
1391: 0,
1392: /*79*/ 0,
1393: 0,
1394: 0,
1395: 0,
1396: MatLoad_Elemental,
1397: /*84*/ 0,
1398: 0,
1399: 0,
1400: 0,
1401: 0,
1402: /*89*/ MatMatMult_Elemental,
1403: MatMatMultSymbolic_Elemental,
1404: MatMatMultNumeric_Elemental,
1405: 0,
1406: 0,
1407: /*94*/ 0,
1408: MatMatTransposeMult_Elemental,
1409: MatMatTransposeMultSymbolic_Elemental,
1410: MatMatTransposeMultNumeric_Elemental,
1411: 0,
1412: /*99*/ 0,
1413: 0,
1414: 0,
1415: MatConjugate_Elemental,
1416: 0,
1417: /*104*/0,
1418: 0,
1419: 0,
1420: 0,
1421: 0,
1422: /*109*/MatMatSolve_Elemental,
1423: 0,
1424: 0,
1425: 0,
1426: 0,
1427: /*114*/0,
1428: 0,
1429: 0,
1430: 0,
1431: 0,
1432: /*119*/0,
1433: MatHermitianTranspose_Elemental,
1434: 0,
1435: 0,
1436: 0,
1437: /*124*/0,
1438: 0,
1439: 0,
1440: 0,
1441: 0,
1442: /*129*/0,
1443: 0,
1444: 0,
1445: 0,
1446: 0,
1447: /*134*/0,
1448: 0,
1449: 0,
1450: 0,
1451: 0
1452: };
1454: /*MC
1455: MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1457: Use ./configure --download-elemental to have PETSc installed with Elemental
1459: Use -pc_type lu -pc_factor_mat_solver_package elemental to use this direct solver
1461: Options Database Keys:
1462: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
- -mat_elemental_grid_height - sets the grid height of the 2D cyclic distribution used for the internal matrix
1465: Level: beginner
1467: .seealso: MATDENSE
1468: M*/
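/*
   Command-line sketch (any PETSc linear-solver example stands in for ./ex1):

     mpiexec -n 4 ./ex1 -mat_type elemental -ksp_type preonly \
         -pc_type lu -pc_factor_mat_solver_package elemental -mat_elemental_grid_height 2
*/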
1472: PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1473: {
1474: Mat_Elemental *a;
1475: PetscErrorCode ierr;
1476: PetscBool flg,flg1;
1477: Mat_Elemental_Grid *commgrid;
1478: MPI_Comm icomm;
1479: PetscInt optv1;
1482: PetscElementalInitializePackage();
1483: PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
1484: A->insertmode = NOT_SET_VALUES;
1486: PetscNewLog(A,&a);
1487: A->data = (void*)a;
1489: /* Set up the elemental matrix */
1490: El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1492: /* the grid must be shared by all Mats on the same communicator; this is implemented by attribute caching on the MPI_Comm */
1493: if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1494: MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
1495: /* MPI_Comm_create_keyval() is the current (MPI-2) name for this deprecated MPI-1 routine */
1496: }
1497: PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1498: MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1499: if (!flg) {
1500: PetscNewLog(A,&commgrid);
1502: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
1503: /* the (CommSize,1) default displayed by -help is an arbitrary placeholder; the real default shape is chosen by El::Grid() below */
1504: PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
1505: if (flg1) {
1506: if (El::mpi::Size(cxxcomm) % optv1 != 0) {
1507: SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1508: }
1509: commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1510: } else {
1511: commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1512: /* possible memory leak reported by valgrind? */
1513: }
1514: commgrid->grid_refct = 1;
1515: MPI_Attr_put(icomm,Petsc_Elemental_keyval,(void*)commgrid);
1516: PetscOptionsEnd();
1517: } else {
1518: commgrid->grid_refct++;
1519: }
1520: PetscCommDestroy(&icomm);
1521: a->grid = commgrid->grid;
1522: a->emat = new El::DistMatrix<PetscElemScalar>(*a->grid);
1523: a->pivot = new El::DistMatrix<PetscInt,El::VC,El::STAR>(*a->grid);
1524: a->roworiented = PETSC_TRUE;
1526: PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
1527: PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",MatElementalHermitianGenDefEig_Elemental);
1529: PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
1530: return(0);
1531: }