Actual source code: matelem.cxx
petsc-3.13.6 2020-09-29
#include <../src/mat/impls/elemental/matelemimpl.h>

/*
  The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
  is attached to a communicator; in this case the attribute is a Mat_Elemental_Grid.
*/
static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
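
/*
  For reference, a minimal sketch (not part of this file; names are hypothetical) of the
  MPI attribute-caching pattern used with Petsc_Elemental_keyval below: a keyval is created
  once per process, then a per-communicator payload is stored and retrieved with
  MPI_Comm_set_attr()/MPI_Comm_get_attr(), which is how the Elemental grid is shared by
  all Mats living on the same communicator.

    static PetscMPIInt my_keyval = MPI_KEYVAL_INVALID;
    PetscBool          flg;
    void              *payload;

    if (my_keyval == MPI_KEYVAL_INVALID) {
      MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&my_keyval,(void*)0);
    }
    MPI_Comm_get_attr(comm,my_keyval,&payload,(int*)&flg);
    if (!flg) MPI_Comm_set_attr(comm,my_keyval,grid_payload);   -- first Mat on this comm caches the grid
*/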
static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  PetscBool     iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      /* call elemental viewing function */
      PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());
      PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      /* call elemental viewing function */
      PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
    } else if (format == PETSC_VIEWER_DEFAULT) {
      PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
      El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
      PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
      if (A->factortype == MAT_FACTOR_NONE){
        Mat Adense;
        MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
        MatView(Adense,viewer);
        MatDestroy(&Adense);
      }
    } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Unsupported viewer format");
  } else {
    /* convert to dense format and call MatView() */
    Mat Adense;
    MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
    MatView(Adense,viewer);
    MatDestroy(&Adense);
  }
  return(0);
}
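
/*
  A hedged usage note: with an assembled MATELEMENTAL matrix, the branches above can be
  exercised from the command line of any PETSc program ("./app" is a placeholder):

    ./app -mat_type elemental -mat_view ::ascii_info    -- run parameters: allocation, grid shape
    ./app -mat_type elemental -mat_view                 -- default: cyclic-ordering print plus dense view
*/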
static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;

  info->block_size = 1.0;

  if (flag == MAT_LOCAL) {
    info->nz_allocated = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used      = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    //MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));
    /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
  } else if (flag == MAT_GLOBAL_SUM) {
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
    info->nz_allocated = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used      = info->nz_allocated; /* assume Elemental does accurate allocation */
    //MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
    //PetscPrintf(PETSC_COMM_SELF," ... [%d] locally allocated %g\n",rank,info->nz_allocated);
  }

  info->nz_unneeded       = 0.0;
  info->assemblies        = A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;

  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_SYMMETRIC:
  case MAT_SORTED_FULL:
  case MAT_HERMITIAN:
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  default:
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
  }
  return(0);
}
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  PetscInt      i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
          /* printf("Queueing remote update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
        }
      }
    }
  } else { /* column-oriented */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
          /* printf("Queueing remote update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
        }
      }
    }
  }
  return(0);
}
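
/*
  A hedged usage sketch of the queueing logic above (row and col are example values):
  off-process entries are queued with QueueUpdate() and flushed by ProcessQueues() during
  assembly (see MatAssemblyBegin_Elemental below), which is why only ADD_VALUES may
  target an entry owned by another process.

    PetscInt    row = 0, col = 7;          -- suppose this process does not own entry (0,7)
    PetscScalar v   = 1.0;
    MatSetValues(A,1,&row,1,&col,&v,ADD_VALUES);   -- queued, applied at assembly
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
*/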
static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* Scoping so that the El::DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}

static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* Scoping so that the El::DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}

static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);}
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* Scoping so that the El::DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}

static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);}
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* Scoping so that the El::DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}
PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  Mat_Elemental   *b = (Mat_Elemental*)B->data;
  Mat_Elemental   *c = (Mat_Elemental*)C->data;
  PetscElemScalar one = 1,zero = 0;

  {
    El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
  }
  C->assembled = PETSC_TRUE;
  return(0);
}

PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
{
  MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
  return(0);
}

static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  Mat_Elemental   *b = (Mat_Elemental*)B->data;
  Mat_Elemental   *c = (Mat_Elemental*)C->data;
  PetscElemScalar one = 1,zero = 0;

  {
    El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
  }
  C->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
{
  MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(C,MATELEMENTAL);
  MatSetUp(C);
  return(0);
}

/* --------------------------------------- */
static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
{
  C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
  C->ops->productsymbolic = MatProductSymbolic_AB;
  return(0);
}

static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
{
  C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
  C->ops->productsymbolic          = MatProductSymbolic_ABt;
  return(0);
}

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
{
  Mat_Product *product = C->product;

  switch (product->type) {
  case MATPRODUCT_AB:
    MatProductSetFromOptions_Elemental_AB(C);
    break;
  case MATPRODUCT_ABt:
    MatProductSetFromOptions_Elemental_ABt(C);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatProduct type %s is not supported for Elemental and Elemental matrices",MatProductTypes[product->type]);
  }
  return(0);
}
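
/*
  A hedged usage sketch of the MatProduct API these hooks plug into (available in this
  PETSc version; A and B are assumed to be assembled MATELEMENTAL matrices):

    Mat C;
    MatProductCreate(A,B,NULL,&C);          -- C = op(A,B)
    MatProductSetType(C,MATPRODUCT_AB);     -- or MATPRODUCT_ABt for A*B^T
    MatProductSetFromOptions(C);            -- dispatches to the routines above
    MatProductSymbolic(C);
    MatProductNumeric(C);
*/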
/* --------------------------------------- */
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatGetSize(A,&nrows,&ncols);
  nD = nrows>ncols ? ncols : nrows;
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
  }
  VecAssemblyBegin(D);
  VecAssemblyEnd(D);
  return(0);
}

static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
{
  Mat_Elemental         *x = (Mat_Elemental*)X->data;
  const PetscElemScalar *d;
  PetscErrorCode        ierr;

  if (R) {
    VecGetArrayRead(R,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
    El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
    VecRestoreArrayRead(R,(const PetscScalar **)&d);
  }
  if (L) {
    VecGetArrayRead(L,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
    El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
    VecRestoreArrayRead(L,(const PetscScalar **)&d);
  }
  return(0);
}

static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  *missing = PETSC_FALSE;
  return(0);
}

static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
{
  Mat_Elemental *x = (Mat_Elemental*)X->data;

  El::Scale((PetscElemScalar)a,*x->emat);
  return(0);
}
/*
  MatAXPY - Computes Y = a*X + Y.
*/
static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_Elemental *x = (Mat_Elemental*)X->data;
  Mat_Elemental *y = (Mat_Elemental*)Y->data;

  El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
  PetscObjectStateIncrease((PetscObject)Y);
  return(0);
}

static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;

  El::Copy(*a->emat,*b->emat);
  PetscObjectStateIncrease((PetscObject)B);
  return(0);
}

static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
{
  Mat           Be;
  MPI_Comm      comm;
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Be);
  MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Be,MATELEMENTAL);
  MatSetUp(Be);
  *B = Be;
  if (op == MAT_COPY_VALUES) {
    Mat_Elemental *b=(Mat_Elemental*)Be->data;
    El::Copy(*a->emat,*b->emat);
  }
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat           Be = *B;
  MPI_Comm      comm;
  Mat_Elemental *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatConjugate_Elemental(Mat A)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;

  El::Conjugate(*a->emat);
  return(0);
}
static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat           Be = *B;
  MPI_Comm      comm;
  Mat_Elemental *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX){
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Adjoint(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}
static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar *x;
  PetscInt        pivoting = a->pivoting;

  VecCopy(B,X);
  VecGetArray(X,(PetscScalar **)&x);

  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
    } else { /* pivoting == 2 */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  El::Copy(xer,xe);

  VecRestoreArray(X,(PetscScalar **)&x);
  return(0);
}

static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
{
  PetscErrorCode ierr;

  MatSolve_Elemental(A,B,X);
  VecAXPY(X,1,Y);
  return(0);
}

static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  Mat_Elemental *x=(Mat_Elemental*)X->data;
  PetscInt      pivoting = a->pivoting;

  El::Copy(*b->emat,*x->emat);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  return(0);
}
static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  PetscInt      pivoting = a->pivoting;

  if (pivoting == 0) {
    El::LU(*a->emat);
  } else if (pivoting == 1) {
    El::LU(*a->emat,*a->P);
  } else {
    El::LU(*a->emat,*a->P,*a->Q);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

static PetscErrorCode MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatLUFactor_Elemental(F,0,0,info);
  return(0);
}

static PetscErrorCode MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_elemental(), skip this routine. */
  return(0);
}

static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;

  El::Cholesky(El::UPPER,*a->emat);
  A->factortype = MAT_FACTOR_CHOLESKY;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatCholeskyFactor_Elemental(F,0,info);
  return(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_elemental(), skip this routine. */
  return(0);
}
PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  *type = MATSOLVERELEMENTAL;
  return(0);
}

static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat B;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(B,MATELEMENTAL);
  MatSetUp(B);
  B->factortype = ftype;
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
{
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
  return(0);
}
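
/*
  A hedged usage sketch of the solver registered above; it is usually reached through KSP:

    ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type elemental

  or directly (A assembled MATELEMENTAL; b, x compatible Vecs; NULL permutations since the
  symbolic stage above is a no-op):

    Mat           F;
    MatFactorInfo finfo;
    MatFactorInfoInitialize(&finfo);
    MatGetFactor(A,MATSOLVERELEMENTAL,MAT_FACTOR_LU,&F);
    MatLUFactorSymbolic(F,A,NULL,NULL,&finfo);
    MatLUFactorNumeric(F,A,&finfo);
    MatSolve(F,b,x);
*/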
static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  switch (type){
  case NORM_1:
    *nrm = El::OneNorm(*a->emat);
    break;
  case NORM_FROBENIUS:
    *nrm = El::FrobeniusNorm(*a->emat);
    break;
  case NORM_INFINITY:
    *nrm = El::InfinityNorm(*a->emat);
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
  }
  return(0);
}
static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  El::Zero(*a->emat);
  return(0);
}

static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  PetscInt      i,m,shift,stride,*idx;

  if (rows) {
    m      = a->emat->LocalHeight();
    shift  = a->emat->ColShift();
    stride = a->emat->ColStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
  }
  if (cols) {
    m      = a->emat->LocalWidth();
    shift  = a->emat->RowShift();
    stride = a->emat->RowStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
  }
  return(0);
}
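
/*
  A hedged usage sketch: because Elemental distributes entries element-cyclically, a rank
  does not own contiguous rows, and MatGetOwnershipIS() (the public interface composed as
  "MatGetOwnershipIS_C" in MatCreate_Elemental below) reports the PETSc indices this rank
  does own, so callers can touch only locally owned entries (cf. MatConvert_Elemental_Dense):

    IS             isrows,iscols;
    const PetscInt *rows;
    PetscInt       nrows;
    MatGetOwnershipIS(A,&isrows,&iscols);
    ISGetLocalSize(isrows,&nrows);
    ISGetIndices(isrows,&rows);
    ... work with rows[0..nrows-1] ...
    ISRestoreIndices(isrows,&rows);
    ISDestroy(&isrows); ISDestroy(&iscols);
*/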
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat             Bmpi;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  MPI_Comm        comm;
  PetscErrorCode  ierr;
  IS              isrows,iscols;
  PetscInt        rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt  *rows,*cols;
  PetscElemScalar v;
  const El::Grid  &grid = a->emat->Grid();

  PetscObjectGetComm((PetscObject)A,&comm);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    MatCreate(comm,&Bmpi);
    MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Bmpi,MATDENSE);
    MatSetUp(Bmpi);
  }

  /* Get local entries of A */
  MatGetOwnershipIS(A,&isrows,&iscols);
  ISGetLocalSize(isrows,&nrows);
  ISGetIndices(isrows,&rows);
  ISGetLocalSize(iscols,&ncols);
  ISGetIndices(iscols,&cols);

  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v     = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v     = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&Bmpi);
  } else {
    *B = Bmpi;
  }
  ISDestroy(&isrows);
  ISDestroy(&iscols);
  return(0);
}
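
/*
  A hedged usage sketch tying the converters together (Ae and Ad are hypothetical names):

    Mat Ae,Ad;
    MatConvert(A,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae);   -- AIJ/SBAIJ -> Elemental (routines below)
    MatConvert(Ae,MATDENSE,MAT_INITIAL_MATRIX,&Ad);      -- Elemental -> MPI dense (routine above)
*/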
PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    for (j=0; j<ncols; j++) {
      /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
      MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}
PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;
      v = A->hermitian ? PetscConj(vals[j]) : vals[j];
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;
      v = A->hermitian ? PetscConj(vals[j]) : vals[j];
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}
static PetscErrorCode MatDestroy_Elemental(Mat A)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  PetscErrorCode     ierr;
  Mat_Elemental_Grid *commgrid;
  PetscBool          flg;
  MPI_Comm           icomm;

  delete a->emat;
  delete a->P;
  delete a->Q;

  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (--commgrid->grid_refct == 0) {
    delete commgrid->grid;
    PetscFree(commgrid);
    MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
  }
  PetscCommDestroy(&icomm);
  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscFree(A->data);
  return(0);
}
PetscErrorCode MatSetUp_Elemental(Mat A)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  MPI_Comm      comm;
  PetscMPIInt   rsize,csize;
  PetscInt      n;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* Check if local row and column sizes are equally distributed.
     Jed: Elemental uses "element" cyclic ordering so the sizes need to match that
     exactly. The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
     PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
  PetscObjectGetComm((PetscObject)A,&comm);
  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->rmap->N);
  if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);

  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->cmap->N);
  if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);

  a->emat->Resize(A->rmap->N,A->cmap->N);
  El::Zero(*a->emat);

  MPI_Comm_size(A->rmap->comm,&rsize);
  MPI_Comm_size(A->cmap->comm,&csize);
  if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
  a->commsize = rsize;
  a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
  a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
  a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
  a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
  return(0);
}
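
/*
  A worked example of the distribution check above: with N = 10 global rows on 3 ranks,
  PetscSplitOwnership() yields local sizes 4,3,3, so a MATELEMENTAL matrix must be created
  with exactly those local row sizes (or PETSC_DECIDE); any other split triggers the
  SETERRQ above. The mr[]/m[] fields then describe this "almost equal" block split:
  mr[0] = 10 % 3 = 1 and m[0] = 10/3 + 1 = 4.
*/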
PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;

  /* printf("Calling ProcessQueues\n"); */
  a->emat->ProcessQueues();
  /* printf("Finished ProcessQueues\n"); */
  return(0);
}

PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  /* Currently does nothing */
  return(0);
}

PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
{
  Mat      Adense,Ae;
  MPI_Comm comm;

  PetscObjectGetComm((PetscObject)newMat,&comm);
  MatCreate(comm,&Adense);
  MatSetType(Adense,MATDENSE);
  MatLoad(Adense,viewer);
  MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
  MatDestroy(&Adense);
  MatHeaderReplace(newMat,&Ae);
  return(0);
}
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       0,
       0,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ MatProductSetFromOptions_Elemental,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0,
       0,
/*140*/0,
       0,
       0,
       0,
       0,
/*145*/0,
       0,
       0
};
/*MC
   MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

   Use ./configure --download-elemental to install PETSc to use Elemental

   Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver

   Options Database Keys:
+ -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
- -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix

  Level: beginner

.seealso: MATDENSE
M*/
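
/*
  A hedged usage sketch of creating a MATELEMENTAL matrix (M and N are hypothetical global
  sizes; the element-cyclic layout is handled internally, cf. MatSetUp_Elemental() above):

    Mat A;
    MatCreate(PETSC_COMM_WORLD,&A);
    MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(A,MATELEMENTAL);        -- -mat_elemental_grid_height is read here
    MatSetUp(A);
    ... MatSetValues(A,...,ADD_VALUES) ...
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
*/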
PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscErrorCode     ierr;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
  A->insertmode = NOT_SET_VALUES;

  PetscNewLog(A,&a);
  A->data = (void*)a;

  /* Set up the elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
  }
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (!flg) {
    PetscNewLog(A,&commgrid);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
    /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
    PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
    if (flg1) {
      if (El::mpi::Size(cxxcomm) % optv1 != 0) {
        SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      }
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
      /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
    }
    commgrid->grid_refct = 1;
    MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

    a->pivoting = 1;
    PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

    PetscOptionsEnd();
  } else {
    commgrid->grid_refct++;
  }
  PetscCommDestroy(&icomm);
  a->grid        = commgrid->grid;
  a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->roworiented = PETSC_TRUE;

  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
  PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
  return(0);
}