Actual source code: matelem.cxx
petsc-3.12.5 2020-03-29
1: #include <../src/mat/impls/elemental/matelemimpl.h>
/*
 The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
 is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
*/
/* MPI_KEYVAL_INVALID marks the keyval as not-yet-created; presumably it is
   created lazily on first grid attach — the creation site is not in this chunk,
   TODO confirm against the rest of the file. */
static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
/*@C
   PetscElementalInitializePackage - Initialize Elemental package

   Logically Collective

   Level: developer

.seealso: MATELEMENTAL, PetscElementalFinalizePackage()
@*/
PetscErrorCode PetscElementalInitializePackage(void)
{
  if (El::Initialized()) return(0); /* idempotent: a second call is a no-op */
  El::Initialize(); /* called by the 1st call of MatCreate_Elemental */
  /* schedule El::Finalize() to run automatically during PetscFinalize() */
  PetscRegisterFinalize(PetscElementalFinalizePackage);
  return(0);
}
/*@C
   PetscElementalFinalizePackage - Finalize Elemental package

   Logically Collective

   Level: developer

.seealso: MATELEMENTAL, PetscElementalInitializePackage()
@*/
PetscErrorCode PetscElementalFinalizePackage(void)
{
  /* Tear down the Elemental runtime; registered via PetscRegisterFinalize()
     in PetscElementalInitializePackage(). */
  El::Finalize(); /* called by PetscFinalize() */
  return(0);
}
45: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
46: {
48: Mat_Elemental *a = (Mat_Elemental*)A->data;
49: PetscBool iascii;
52: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
53: if (iascii) {
54: PetscViewerFormat format;
55: PetscViewerGetFormat(viewer,&format);
56: if (format == PETSC_VIEWER_ASCII_INFO) {
57: /* call elemental viewing function */
58: PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
59: PetscViewerASCIIPrintf(viewer," allocated entries=%d\n",(*a->emat).AllocatedMemory());
60: PetscViewerASCIIPrintf(viewer," grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
61: if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
62: /* call elemental viewing function */
63: PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
64: }
66: } else if (format == PETSC_VIEWER_DEFAULT) {
67: PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
68: El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
69: PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
70: if (A->factortype == MAT_FACTOR_NONE){
71: Mat Adense;
72: PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
73: MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
74: MatView(Adense,viewer);
75: MatDestroy(&Adense);
76: }
77: } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
78: } else {
79: /* convert to dense format and call MatView() */
80: Mat Adense;
81: PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
82: MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
83: MatView(Adense,viewer);
84: MatDestroy(&Adense);
85: }
86: return(0);
87: }
/*
  MatGetInfo_Elemental - Report memory/assembly statistics for MATELEMENTAL.

  Only the local allocation counts are available from Elemental; MAT_GLOBAL_MAX
  is not implemented (fields left untouched) and MAT_GLOBAL_SUM currently
  reports the LOCAL allocation without reduction — see the commented-out
  MPIU_Allreduce calls below for the intended implementation.
*/
static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  info->block_size = 1.0;

  if (flag == MAT_LOCAL) {
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    //MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));
    /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
  } else if (flag == MAT_GLOBAL_SUM) {
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
    //MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
    //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
  }

  /* dense storage: nothing is wasted, no mallocs during assembly */
  info->nz_unneeded       = 0.0;
  info->assemblies        = A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}
/*
  MatSetOption_Elemental - Handle MatSetOption() for MATELEMENTAL.

  Only MAT_ROW_ORIENTED has an effect (controls how MatSetValues() interprets
  the values array); the other listed options are accepted and silently
  ignored, and anything else is an error.
*/
PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_SYMMETRIC:
  case MAT_SORTED_FULL:
    break; /* accepted but irrelevant for a dense distributed matrix */
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  default:
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
  }
  return(0);
}
/*
  MatSetValues_Elemental - Insert/add a logically dense block of values.

  PETSc (row,col) indices are translated to Elemental indices via the
  P2RO (PETSc -> rank,offset) and RO2E (rank,offset -> Elemental) helpers.
  Locally-owned entries are Set/Update'd immediately; off-process entries are
  only supported with ADD_VALUES and are handled by Elemental's QueueUpdate
  mechanism, which requires two passes: first count the queue entries (and do
  the local updates), then Reserve() and queue the remote updates.  Negative
  row/column indices are skipped, matching the PETSc MatSetValues convention.
  The roworiented flag selects whether vals is read row-major or column-major.
*/
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Second pass: queue the remote (off-process) ADD_VALUES updates */
    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
        }
      }
    }
  } else { /* columnoriented: identical logic, vals indexed as vals[i+j*nr] */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Second pass: queue the remote (off-process) ADD_VALUES updates */
    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
        }
      }
    }
  }
  return(0);
}
/*
  MatMult_Elemental - Y = A*X.

  The PETSc vector arrays are wrapped (zero-copy) as Elemental [VC,STAR]
  distributed matrices via Attach/LockedAttach, then El::Gemv does the work.
  This relies on PETSc's vector layout matching Elemental's VC distribution
  for this grid — presumably guaranteed by the MATELEMENTAL layout setup,
  which is not visible in this chunk.
*/
static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye); /* ye = 1*A*xe + 0*ye */
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}
/*
  MatMultTranspose_Elemental - Y = A^T*X.

  Same zero-copy Attach pattern as MatMult_Elemental, with the row/column
  maps swapped (X has length rmap->N, Y has length cmap->N) and
  El::TRANSPOSE selecting the transposed product.
*/
static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye); /* ye = A^T*xe */
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}
/*
  MatMultAdd_Elemental - Z = A*X + Y.

  Y is first copied into Z (unless they alias), then El::Gemv accumulates
  A*X into Z with beta = 1.
*/
static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);} /* start from Y; skip the copy when Z aliases Y */
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze); /* ze = A*xe + ze */
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}
/*
  MatMultTransposeAdd_Elemental - Z = A^T*X + Y.

  Mirror of MatMultAdd_Elemental with El::TRANSPOSE and row/column maps
  swapped.
*/
static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);} /* start from Y; skip the copy when Z aliases Y */
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze); /* ze = A^T*xe + ze */
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}
/*
  MatMatMultNumeric_Elemental - Numeric phase of C = A*B via El::Gemm.
  C must already have been created by MatMatMultSymbolic_Elemental().
*/
static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental    *a = (Mat_Elemental*)A->data;
  Mat_Elemental    *b = (Mat_Elemental*)B->data;
  Mat_Elemental    *c = (Mat_Elemental*)C->data;
  PetscElemScalar  one = 1,zero = 0;

  { /* Scoping so that constructor is called before pointer is returned */
    El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat); /* C = A*B */
  }
  C->assembled = PETSC_TRUE; /* product is complete; no MatAssembly needed */
  return(0);
}
/*
  MatMatMultSymbolic_Elemental - Symbolic phase of C = A*B: create C with
  local size m_A x n_B on A's communicator.  fill is ignored (dense storage).
*/
static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
{
  Mat            Ce;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Ce);
  MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  *C = Ce;
  return(0);
}
/*
  MatMatMult_Elemental - C = A*B driver: symbolic phase only for
  MAT_INITIAL_MATRIX (MAT_REUSE_MATRIX reuses the existing C), then the
  numeric phase, each wrapped in the corresponding PETSc log event.
*/
static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  if (scall == MAT_INITIAL_MATRIX){
    PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
    MatMatMultSymbolic_Elemental(A,B,1.0,C);
    PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
  }
  PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
  MatMatMultNumeric_Elemental(A,B,*C);
  PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
  return(0);
}
/*
  MatMatTransposeMultNumeric_Elemental - Numeric phase of C = A*B^T via
  El::Gemm with the second operand transposed.
*/
static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  Mat_Elemental      *b = (Mat_Elemental*)B->data;
  Mat_Elemental      *c = (Mat_Elemental*)C->data;
  PetscElemScalar    one = 1,zero = 0;

  { /* Scoping so that constructor is called before pointer is returned */
    El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat); /* C = A*B^T */
  }
  C->assembled = PETSC_TRUE;
  return(0);
}
/*
  MatMatTransposeMultSymbolic_Elemental - Symbolic phase of C = A*B^T:
  create C with local size m_A x m_B (note B->rmap, not cmap, since B is
  transposed in the product).  fill is ignored (dense storage).
*/
static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
{
  Mat            Ce;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Ce);
  MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  *C = Ce;
  return(0);
}
408: static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
409: {
413: if (scall == MAT_INITIAL_MATRIX){
414: PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);
415: MatMatMultSymbolic_Elemental(A,B,1.0,C);
416: PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);
417: }
418: PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);
419: MatMatTransposeMultNumeric_Elemental(A,B,*C);
420: PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);
421: return(0);
422: }
/*
  MatGetDiagonal_Elemental - Extract the main diagonal (first min(m,n)
  entries) into Vec D.

  Each global index i is translated to Elemental coordinates for both the
  row and the column; a->emat->Get() performs the (collective) fetch of a
  possibly remote entry, so this loop is communication-heavy and every rank
  iterates over the full diagonal.
*/
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatGetSize(A,&nrows,&ncols);
  nD = nrows>ncols ? ncols : nrows; /* diagonal length of a rectangular matrix */
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
  }
  VecAssemblyBegin(D);
  VecAssemblyEnd(D);
  return(0);
}
/*
  MatDiagonalScale_Elemental - X = diag(L)*X*diag(R).

  Either vector may be NULL to skip that side.  The vector arrays are
  wrapped zero-copy as Elemental [VC,STAR] matrices and El::DiagonalScale
  applies the scaling in place on x->emat.
*/
static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
{
  Mat_Elemental         *x = (Mat_Elemental*)X->data;
  const PetscElemScalar *d;
  PetscErrorCode        ierr;

  if (R) {
    VecGetArrayRead(R,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
    El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat); /* scale columns by R */
    VecRestoreArrayRead(R,(const PetscScalar **)&d);
  }
  if (L) {
    VecGetArrayRead(L,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
    El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat); /* scale rows by L */
    VecRestoreArrayRead(L,(const PetscScalar **)&d);
  }
  return(0);
}
/*
  MatMissingDiagonal_Elemental - A dense matrix stores every entry, so no
  diagonal entry can be structurally missing; always reports PETSC_FALSE
  (the optional index output d is left untouched).
*/
static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  *missing = PETSC_FALSE;
  return(0);
}
/*
  MatScale_Elemental - X = a*X, delegated to El::Scale on the local data.
*/
static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;

  El::Scale((PetscElemScalar)a,*x->emat);
  return(0);
}
/*
  MatAXPY - Computes Y = a*X + Y.

  str (nonzero-pattern hint) is ignored: both operands are dense with
  identical layouts, so a plain El::Axpy suffices.
*/
static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;
  Mat_Elemental  *y = (Mat_Elemental*)Y->data;

  El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
  /* bump the state so cached norms/factorizations of Y are invalidated */
  PetscObjectStateIncrease((PetscObject)Y);
  return(0);
}
/*
  MatCopy_Elemental - B = A via El::Copy; the MatStructure hint is ignored
  (dense storage).  Increments B's object state to invalidate caches.
*/
static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;

  El::Copy(*a->emat,*b->emat);
  PetscObjectStateIncrease((PetscObject)B);
  return(0);
}
/*
  MatDuplicate_Elemental - Create a new MATELEMENTAL with the same layout as
  A; copies the values only for MAT_COPY_VALUES (MAT_DO_NOT_COPY_VALUES and
  MAT_SHARE_NONZERO_PATTERN leave the new matrix's values as created by
  MatSetUp).  The duplicate is marked assembled either way.
*/
static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
{
  Mat            Be;
  MPI_Comm       comm;
  Mat_Elemental  *a=(Mat_Elemental*)A->data;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Be);
  MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Be,MATELEMENTAL);
  MatSetUp(Be);
  *B = Be;
  if (op == MAT_COPY_VALUES) {
    Mat_Elemental *b=(Mat_Elemental*)Be->data;
    El::Copy(*a->emat,*b->emat);
  }
  Be->assembled = PETSC_TRUE;
  return(0);
}
/*
  MatTranspose_Elemental - B = A^T via El::Transpose.

  MAT_INITIAL_MATRIX creates B (n x m); MAT_REUSE_MATRIX writes into the
  caller-supplied B; MAT_INPLACE_MATRIX is rejected since source and
  destination would alias inside El::Transpose.
*/
static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}
/*
  MatConjugate_Elemental - In-place complex conjugation of all entries
  (a no-op for real scalars) via El::Conjugate.
*/
static PetscErrorCode MatConjugate_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  El::Conjugate(*a->emat);
  return(0);
}
574: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
575: {
576: Mat Be = *B;
578: MPI_Comm comm;
579: Mat_Elemental *a = (Mat_Elemental*)A->data, *b;
582: PetscObjectGetComm((PetscObject)A,&comm);
583: /* Only out-of-place supported */
584: if (reuse == MAT_INITIAL_MATRIX){
585: MatCreate(comm,&Be);
586: MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
587: MatSetType(Be,MATELEMENTAL);
588: MatSetUp(Be);
589: *B = Be;
590: }
591: b = (Mat_Elemental*)Be->data;
592: El::Adjoint(*a->emat,*b->emat);
593: Be->assembled = PETSC_TRUE;
594: return(0);
595: }
/*
  MatSolve_Elemental - X = A^{-1} B using a previously computed factorization
  stored in a->emat.

  B is copied into X, X's array is wrapped as a [VC,STAR] vector, then
  redistributed to [MC,MR] (the layout the factored matrix uses) for the
  triangular solves.  For LU, a->pivoting selects no pivoting (0), partial
  pivoting with permutation P (1), or full pivoting with P and Q (2).
  The solution is copied back into X's [VC,STAR] wrapper at the end.
*/
static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscErrorCode    ierr;
  PetscElemScalar   *x;
  PetscInt          pivoting = a->pivoting;

  VecCopy(B,X); /* solve in place on X's data, leaving B untouched */
  VecGetArray(X,(PetscScalar **)&x);

  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe); /* redistribute to [MC,MR] */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
    } else { /* pivoting == 2 */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  El::Copy(xer,xe); /* redistribute the solution back into X's array */

  VecRestoreArray(X,(PetscScalar **)&x);
  return(0);
}
/*
  MatSolveAdd_Elemental - X = A^{-1} B + Y, composed from MatSolve_Elemental
  followed by an axpy with Y.
*/
static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
{
  PetscErrorCode    ierr;

  MatSolve_Elemental(A,B,X);
  VecAXPY(X,1,Y);
  return(0);
}
/*
  MatMatSolve_Elemental - X = A^{-1} B for a dense right-hand-side matrix B.

  B is copied into X, then the multi-RHS solve is done in place on x->emat.
  Same factor-type/pivoting dispatch as MatSolve_Elemental; no [MC,MR]
  redistribution is needed here because X is already a MATELEMENTAL matrix.
*/
static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  Mat_Elemental *x=(Mat_Elemental*)X->data;
  PetscInt      pivoting = a->pivoting;

  El::Copy(*b->emat,*x->emat); /* solve in place on X, preserving B */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  return(0);
}
/*
  MatLUFactor_Elemental - In-place LU factorization of a->emat.

  a->pivoting selects El::LU without pivoting (0), with partial pivoting
  recorded in a->P (1), or full pivoting recorded in a->P and a->Q (2).
  The row/column IS arguments and MatFactorInfo are ignored (Elemental
  chooses its own pivoting).  Records factor type and solver name on A.
*/
static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       pivoting = a->pivoting;

  if (pivoting == 0) {
    El::LU(*a->emat);
  } else if (pivoting == 1) {
    El::LU(*a->emat,*a->P);
  } else {
    El::LU(*a->emat,*a->P,*a->Q);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  /* replace any previous solver-type string with ours */
  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}
/*
  MatLUFactorNumeric_Elemental - Numeric LU: copy A into the factor matrix F
  (created by MatGetFactor) and factor F in place.
*/
static PetscErrorCode MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatLUFactor_Elemental(F,0,0,info);
  return(0);
}
/*
  MatLUFactorSymbolic_Elemental - Intentionally a no-op: dense storage needs
  no symbolic analysis.
*/
static PetscErrorCode MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  return(0);
}
712: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
713: {
714: Mat_Elemental *a = (Mat_Elemental*)A->data;
715: El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
719: El::Cholesky(El::UPPER,*a->emat);
720: A->factortype = MAT_FACTOR_CHOLESKY;
721: A->assembled = PETSC_TRUE;
723: PetscFree(A->solvertype);
724: PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
725: return(0);
726: }
/*
  MatCholeskyFactorNumeric_Elemental - Numeric Cholesky: copy A into the
  factor matrix F and factor F in place.
*/
static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatCholeskyFactor_Elemental(F,0,info);
  return(0);
}
/*
  MatCholeskyFactorSymbolic_Elemental - Intentionally a no-op: dense storage
  needs no symbolic analysis.
*/
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  return(0);
}
/*
  MatFactorGetSolverType_elemental_elemental - Report MATSOLVERELEMENTAL as
  the solver package backing this factored matrix (composed on F by
  MatGetFactor_elemental_elemental).
*/
PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  *type = MATSOLVERELEMENTAL;
  return(0);
}
/*
  MatGetFactor_elemental_elemental - Create the factor matrix F for LU or
  Cholesky: a fresh MATELEMENTAL with the same layout as A, tagged with the
  requested factor type and the MATSOLVERELEMENTAL solver name, and with the
  solver-type query function composed on it.
*/
static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(B,MATELEMENTAL);
  MatSetUp(B);
  B->factortype = ftype;
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
  *F            = B;
  return(0);
}
/*
  MatSolverTypeRegister_Elemental - Register MATSOLVERELEMENTAL as the LU and
  Cholesky factorization provider for MATELEMENTAL matrices.
*/
PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
{
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
  return(0);
}
782: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
783: {
784: Mat_Elemental *a=(Mat_Elemental*)A->data;
787: switch (type){
788: case NORM_1:
789: *nrm = El::OneNorm(*a->emat);
790: break;
791: case NORM_FROBENIUS:
792: *nrm = El::FrobeniusNorm(*a->emat);
793: break;
794: case NORM_INFINITY:
795: *nrm = El::InfinityNorm(*a->emat);
796: break;
797: default:
798: printf("Error: unsupported norm type!\n");
799: }
800: return(0);
801: }
/*
  MatZeroEntries_Elemental - Set every entry of A to zero via El::Zero
  (storage is retained, only values are cleared).
*/
static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  El::Zero(*a->emat);
  return(0);
}
/*
  MatGetOwnershipIS_Elemental - Build index sets of the PETSc global row
  (resp. column) indices whose entries are stored locally by Elemental's
  cyclic distribution.

  For each local slot i the Elemental global index is shift + i*stride;
  E2RO/RO2P translate it back to a PETSc global index.  Either output may be
  NULL to skip that side.  The idx arrays are handed to the IS with
  PETSC_OWN_POINTER, so no explicit free is needed here.
*/
static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,m,shift,stride,*idx;

  if (rows) {
    m = a->emat->LocalHeight();
    shift = a->emat->ColShift();
    stride = a->emat->ColStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
  }
  if (cols) {
    m = a->emat->LocalWidth();
    shift = a->emat->RowShift();
    stride = a->emat->RowStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
  }
  return(0);
}
/*
  MatConvert_Elemental_Dense - Convert a MATELEMENTAL matrix to MATDENSE.

  Each process walks the entries it owns (from MatGetOwnershipIS), maps each
  PETSc (row,col) to Elemental global then local indices, reads the value
  with GetLocal, and inserts it into the dense matrix with MatSetValues;
  MatAssembly then routes entries to their owning dense rows.  Handles
  MAT_INITIAL_MATRIX / MAT_REUSE_MATRIX / MAT_INPLACE_MATRIX (the last via
  MatHeaderReplace).

  NOTE(review): the Elemental-local index computation (erow / grid.MCSize(),
  ecol / grid.MRSize()) assumes the standard [MC,MR] distribution with zero
  alignments — confirm against the emat creation site, which is not in this
  chunk.
*/
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscObjectGetComm((PetscObject)A,&comm);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    MatCreate(comm,&Bmpi);
    MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Bmpi,MATDENSE);
    MatSetUp(Bmpi);
  }

  /* Get local entries of A */
  MatGetOwnershipIS(A,&isrows,&iscols);
  ISGetLocalSize(isrows,&nrows);
  ISGetIndices(isrows,&rows);
  ISGetLocalSize(iscols,&ncols);
  ISGetIndices(iscols,&cols);

  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx);   /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx);   /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&Bmpi); /* A's header now fronts the dense matrix */
  } else {
    *B = Bmpi;
  }
  ISDestroy(&isrows);
  ISDestroy(&iscols);
  return(0);
}
/*
  MatConvert_SeqAIJ_Elemental - Convert a sequential AIJ matrix to
  MATELEMENTAL by re-inserting every stored row.

  ADD_VALUES is required because the PETSc-Elemental interface only supports
  additive off-process insertion; for MAT_REUSE_MATRIX the target is zeroed
  first so the adds reproduce the values exactly.
*/
PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental); /* so ADD_VALUES below yields the exact values */
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}
/*
  MatConvert_MPIAIJ_Elemental - Convert a parallel AIJ matrix to
  MATELEMENTAL; each rank re-inserts only its owned rows [rstart,rend).

  Entries are added one at a time with ADD_VALUES (the only off-process
  insertion mode the PETSc-Elemental interface supports); MAT_REUSE_MATRIX
  zeroes the target first so the adds reproduce the values exactly.
*/
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental); /* so ADD_VALUES below yields the exact values */
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    for (j=0; j<ncols; j++) {
      /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
      MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}
/*
  MatConvert_SeqSBAIJ_Elemental - Convert a sequential symmetric-block (SBAIJ)
  matrix, which stores only the upper triangle, to a full MATELEMENTAL matrix.

  For every stored row the upper-triangular entries are added as-is, and each
  strictly-upper entry (col != row) is mirrored into the lower triangle to
  reconstruct the full symmetric matrix.  Requires the
  MatGetRowUpperTriangular/MatRestoreRowUpperTriangular bracket so MatGetRow
  exposes the upper-triangular storage.
*/
PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental); /* so ADD_VALUES below yields the exact values */
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      if (cols[j] == row) continue; /* don't double-count the diagonal */
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}
1035: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1036: {
1037: Mat mat_elemental;
1038: PetscErrorCode ierr;
1039: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1040: const PetscInt *cols;
1041: const PetscScalar *vals;
1044: if (reuse == MAT_REUSE_MATRIX) {
1045: mat_elemental = *newmat;
1046: MatZeroEntries(mat_elemental);
1047: } else {
1048: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1049: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1050: MatSetType(mat_elemental,MATELEMENTAL);
1051: MatSetUp(mat_elemental);
1052: }
1053: MatGetRowUpperTriangular(A);
1054: for (row=rstart; row<rend; row++) {
1055: MatGetRow(A,row,&ncols,&cols,&vals);
1056: /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1057: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1058: for (j=0; j<ncols; j++) { /* lower triangular part */
1059: if (cols[j] == row) continue;
1060: MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
1061: }
1062: MatRestoreRow(A,row,&ncols,&cols,&vals);
1063: }
1064: MatRestoreRowUpperTriangular(A);
1065: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1066: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1068: if (reuse == MAT_INPLACE_MATRIX) {
1069: MatHeaderReplace(A,&mat_elemental);
1070: } else {
1071: *newmat = mat_elemental;
1072: }
1073: return(0);
1074: }
/*
   MatDestroy_Elemental - Frees the Elemental storage held by A and drops this
   matrix's reference to the El::Grid cached on the communicator (attribute
   Petsc_Elemental_keyval, created in MatCreate_Elemental()).  When the last
   reference goes away the grid, its cache entry, and the keyval itself are
   deleted.
*/
static PetscErrorCode MatDestroy_Elemental(Mat A)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  PetscErrorCode     ierr;
  Mat_Elemental_Grid *commgrid;
  PetscBool          flg;
  MPI_Comm           icomm;

  /* Release the distributed matrix and the cached row/column permutations */
  delete a->emat;
  delete a->P;
  delete a->Q;

  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  /* Look up the grid cache on PETSc's inner duplicated communicator — the
     same comm MatCreate_Elemental() attached the attribute to */
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  /* NOTE(review): flg is not checked before dereferencing commgrid; this
     relies on MatCreate_Elemental() having always set the attribute — confirm */
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (--commgrid->grid_refct == 0) {
    delete commgrid->grid;
    PetscFree(commgrid);
    /* last user gone: retire the keyval so a later create re-registers it */
    MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
  }
  PetscCommDestroy(&icomm);
  /* Remove the composed methods installed by MatCreate_Elemental() et al. */
  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscFree(A->data);
  return(0);
}
/*
   MatSetUp_Elemental - Sizes and zeroes the internal El::DistMatrix once the
   global dimensions are known.  The PETSc local sizes are required to match
   the equal split produced by PetscSplitOwnership(), since the interface
   implicitly permutes between PETSc's block ordering and Elemental's
   element-cyclic ordering (see the note from Jed below).
*/
PetscErrorCode MatSetUp_Elemental(Mat A)
{
  Mat_Elemental *a = (Mat_Elemental*)A->data;
  MPI_Comm      comm;
  PetscMPIInt   rsize,csize;
  PetscInt      n;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* Check if local row and column sizes are equally distributed.
     Jed: Elemental uses "element" cyclic ordering so the sizes need to match that
     exactly. The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
     PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
  PetscObjectGetComm((PetscObject)A,&comm);
  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->rmap->N);
  if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);

  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->cmap->N);
  if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);

  /* Allocate the Elemental matrix at its global size and clear it */
  a->emat->Resize(A->rmap->N,A->cmap->N);
  El::Zero(*a->emat);

  /* Row and column layouts must live on communicators of equal size */
  MPI_Comm_size(A->rmap->comm,&rsize);
  MPI_Comm_size(A->cmap->comm,&csize);
  if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
  a->commsize = rsize;
  /* mr[] holds N % commsize (or commsize when it divides evenly) and m[] the
     corresponding local block length for rows (index 0) and columns (index 1);
     presumably these parameterize the PETSc<->Elemental ordering permutation
     used elsewhere in this file — confirm against the permutation routines */
  a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
  a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
  a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
  a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
  return(0);
}
1143: PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1144: {
1145: Mat_Elemental *a = (Mat_Elemental*)A->data;
1148: /* printf("Calling ProcessQueues\n"); */
1149: a->emat->ProcessQueues();
1150: /* printf("Finished ProcessQueues\n"); */
1151: return(0);
1152: }
/*
   MatAssemblyEnd_Elemental - Completes assembly; intentionally a no-op because
   all queued updates are flushed in MatAssemblyBegin_Elemental().
*/
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  /* Currently does nothing */
  return(0);
}
1161: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1162: {
1164: Mat Adense,Ae;
1165: MPI_Comm comm;
1168: PetscObjectGetComm((PetscObject)newMat,&comm);
1169: MatCreate(comm,&Adense);
1170: MatSetType(Adense,MATDENSE);
1171: MatLoad(Adense,viewer);
1172: MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1173: MatDestroy(&Adense);
1174: MatHeaderReplace(newMat,&Ae);
1175: return(0);
1176: }
1178: /* -------------------------------------------------------------------*/
/*
   MatOps_Values - Positional dispatch table for MATELEMENTAL, copied into
   A->ops by MatCreate_Elemental().  Slots are identified by index (the
   /\*NN*\/ markers); a 0 entry means the operation is not supported by this
   type.  Do NOT reorder entries — the position, not the name, selects the
   operation.
*/
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_Elemental,
       MatMatMultSymbolic_Elemental,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       MatMatTransposeMult_Elemental,
       MatMatTransposeMultSymbolic_Elemental,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ 0,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0
};
1321: /*MC
1322: MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1324: Use ./configure --download-elemental to install PETSc to use Elemental
1326: Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver
1328: Options Database Keys:
1329: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1330: - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1332: Level: beginner
1334: .seealso: MATDENSE
1335: M*/
/*
   MatCreate_Elemental - Constructor for the MATELEMENTAL type: installs the
   operations table, allocates the per-matrix data, and obtains the El::Grid
   shared by all ELEMENTAL matrices on this communicator (created on first use
   and cached as an MPI attribute under Petsc_Elemental_keyval; released in
   MatDestroy_Elemental()).
*/
PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscErrorCode     ierr;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscElementalInitializePackage();
  PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
  A->insertmode = NOT_SET_VALUES;

  PetscNewLog(A,&a);
  A->data = (void*)a;

  /* Set up the elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
  }
  /* Use PETSc's inner duplicated communicator so the cached grid is visible
     to every PETSc object sharing this communicator */
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (!flg) {
    /* First ELEMENTAL matrix on this communicator: create the grid and cache it */
    PetscNewLog(A,&commgrid);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
    /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
    PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
    if (flg1) {
      /* NOTE(review): a user-supplied grid height of 0 would make this modulo
         divide by zero before the error check can fire — confirm upstream guard */
      if (El::mpi::Size(cxxcomm) % optv1 != 0) {
        SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      }
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
      /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
    }
    commgrid->grid_refct = 1;
    MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

    a->pivoting = 1;
    PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

    PetscOptionsEnd();
  } else {
    /* Grid already cached: just take another reference (dropped in MatDestroy_Elemental()) */
    commgrid->grid_refct++;
  }
  PetscCommDestroy(&icomm);
  a->grid        = commgrid->grid;
  a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->roworiented = PETSC_TRUE;

  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
  PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
  return(0);
}