Actual source code: matelem.cxx

petsc-3.6.1 2015-08-06
Report Typos and Errors
  1: #include <../src/mat/impls/elemental/matelemimpl.h> /*I "petscmat.h" I*/

  3: /*
  4:     The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
  5:   is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
  6: */
  7: static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;

 11: /*@C
 12:    PetscElementalInitializePackage - Initialize Elemental package
 
 14:    Logically Collective
 
 16:    Level: developer
 
 18: .seealso: MATELEMENTAL, PetscElementalFinalizePackage()
 19: @*/
 20: PetscErrorCode PetscElementalInitializePackage(void)
 21: {
 
       /* Idempotent: if Elemental is already up there is nothing to do. */
 25:   if (El::Initialized()) return(0);
 26:   { /* We have already initialized MPI, so this song and dance is just to pass these variables (which won't be used by Elemental) through the interface that needs references */
 27:     int zero = 0;
 28:     char **nothing = 0;
 29:     El::Initialize(zero,nothing);   /* called by the 1st call of MatCreate_Elemental */
 30:   }
       /* Ensure El::Finalize() is invoked automatically during PetscFinalize(). */
 31:   PetscRegisterFinalize(PetscElementalFinalizePackage);
 32:   return(0);
 33: }

 37: /*@C
 38:    PetscElementalFinalizePackage - Finalize Elemental package
 
 40:    Logically Collective
 
 42:    Level: developer
 
 44: .seealso: MATELEMENTAL, PetscElementalInitializePackage()
 45: @*/
 46: PetscErrorCode PetscElementalFinalizePackage(void)
 47: {
 49:   El::Finalize();  /* called by PetscFinalize() */
 50:   return(0);
 51: }

/* Views an Elemental matrix.  ASCII_INFO prints grid/allocation summary;
   ASCII default prints the cyclic-ordered matrix via El::Print() and, for an
   unfactored matrix, also a dense conversion in PETSc (explicit) ordering.
   Non-ASCII viewers fall back to converting to MATDENSE and viewing that. */
 55: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
 56: {
 58:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
 59:   PetscBool      iascii;
 
 62:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 63:   if (iascii) {
 64:     PetscViewerFormat format;
 65:     PetscViewerGetFormat(viewer,&format);
 66:     if (format == PETSC_VIEWER_ASCII_INFO) {
 67:       /* call elemental viewing function */
 68:       PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
 69:       PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());
 70:       PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
       /* NOTE(review): this inner test can never be true -- format already
          equals PETSC_VIEWER_ASCII_INFO here, so the FACTOR_INFO branch is
          dead code; it likely belongs on the outer if/else chain. */
 71:       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
 72:         /* call elemental viewing function */
 73:         PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
 74:       }
 
 76:     } else if (format == PETSC_VIEWER_DEFAULT) {
 77:       PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
 78:       El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
 79:       PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
 80:       if (A->factortype == MAT_FACTOR_NONE){
 81:         Mat Adense;
 82:         PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
 83:         MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 84:         MatView(Adense,viewer);
 85:         MatDestroy(&Adense);
 86:       }
 87:     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
 88:   } else {
 89:     /* convert to dense format and call MatView() */
 90:     Mat Adense;
 91:     PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
 92:     MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 93:     MatView(Adense,viewer);
 94:     MatDestroy(&Adense);
 95:   }
 96:   return(0);
 97: }

/* Fills a MatInfo struct for an Elemental matrix.  Only MAT_LOCAL and
   MAT_GLOBAL_SUM report nonzero counts (both from the locally allocated
   Elemental storage); MAT_GLOBAL_MAX is not implemented (left zero). */
101: static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
102: {
103:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
104:   PetscMPIInt    rank;
 
107:   MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);
 
109:   /* if (!rank) printf("          .........MatGetInfo_Elemental ...\n"); */
110:   info->block_size     = 1.0;
 
112:   if (flag == MAT_LOCAL) {
113:     info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
114:     info->nz_used        = info->nz_allocated;
115:   } else if (flag == MAT_GLOBAL_MAX) {
116:     //MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));
117:     /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
118:     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
119:   } else if (flag == MAT_GLOBAL_SUM) {
120:     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
       /* NOTE(review): reports the LOCAL allocation, not a global reduction --
          the commented-out MPI_Allreduce below suggests the sum was intended. */
121:     info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
122:     info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
123:     //MPI_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
124:     //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
125:   }
 
127:   info->nz_unneeded       = 0.0;
128:   info->assemblies        = (double)A->num_ass;
129:   info->mallocs           = 0;
130:   info->memory            = ((PetscObject)A)->mem;
131:   info->fill_ratio_given  = 0; /* determined by Elemental */
132:   info->fill_ratio_needed = 0;
133:   info->factor_mallocs    = 0;
134:   return(0);
135: }

/* Sets/adds a logically dense block of values (row-major in vals).  Negative
   row/column indices are skipped.  PETSc indices are translated to Elemental
   indices via P2RO/RO2E; entries whose (MC,MR) grid owner is not this rank are
   routed through the AxpyInterface, which only supports ADD_VALUES. */
139: static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
140: {
142:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
143:   PetscMPIInt    rank;
144:   PetscInt       i,j,rrank,ridx,crank,cidx;
 
147:   MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);
 
149:   const El::Grid &grid = a->emat->Grid();
150:   for (i=0; i<nr; i++) {
151:     PetscInt erow,ecol,elrow,elcol;
152:     if (rows[i] < 0) continue;
153:     P2RO(A,0,rows[i],&rrank,&ridx);
154:     RO2E(A,0,rrank,ridx,&erow);
155:     if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
156:     for (j=0; j<nc; j++) {
157:       if (cols[j] < 0) continue;
158:       P2RO(A,1,cols[j],&crank,&cidx);
159:       RO2E(A,1,crank,cidx,&ecol);
160:       if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
161:       if (erow % grid.MCSize() != grid.MCRank() || ecol % grid.MRSize() != grid.MRRank()){ /* off-proc entry */
162:         if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
163:         /* PetscPrintf(PETSC_COMM_SELF,"[%D] add off-proc entry (%D,%D, %g) (%D %D)\n",rank,rows[i],cols[j],*(vals+i*nc),erow,ecol); */
           /* stage the single value in the 1x1 scratch matrix and ship it via Axpy */
164:         a->esubmat->Set(0,0, (PetscElemScalar)vals[i*nc+j]);
165:         a->interface->Axpy(1.0,*(a->esubmat),erow,ecol);
166:         continue;
167:       }
       /* locally owned: convert global Elemental indices to local ones */
168:       elrow = erow / grid.MCSize();
169:       elcol = ecol / grid.MRSize();
170:       switch (imode) {
171:       case INSERT_VALUES: a->emat->SetLocal(elrow,elcol,(PetscElemScalar)vals[i*nc+j]); break;
172:       case ADD_VALUES: a->emat->UpdateLocal(elrow,elcol,(PetscElemScalar)vals[i*nc+j]); break;
173:       default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
174:       }
175:     }
176:   }
177:   return(0);
178: }

/* Y = A*X.  The PETSc vector arrays are attached (zero-copy) to Elemental
   [VC,STAR] distributed matrices and the product is done by El::Gemv. */
182: static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
183: {
184:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
185:   PetscErrorCode        ierr;
186:   const PetscElemScalar *x;
187:   PetscElemScalar       *y;
188:   PetscElemScalar       one = 1,zero = 0;
 
191:   VecGetArrayRead(X,(const PetscScalar **)&x);
192:   VecGetArray(Y,(PetscScalar **)&y);
193:   { /* Scoping so that constructor is called before pointer is returned */
194:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
195:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
196:     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
197:     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
198:   }
199:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
200:   VecRestoreArray(Y,(PetscScalar **)&y);
201:   return(0);
202: }

/* Y = A^T*X.  Same zero-copy attach scheme as MatMult_Elemental, with the
   row/column maps swapped and El::TRANSPOSE passed to Gemv. */
206: static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
207: {
208:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
209:   PetscErrorCode        ierr;
210:   const PetscElemScalar *x;
211:   PetscElemScalar       *y;
212:   PetscElemScalar       one = 1,zero = 0;
 
215:   VecGetArrayRead(X,(const PetscScalar **)&x);
216:   VecGetArray(Y,(PetscScalar **)&y);
217:   { /* Scoping so that constructor is called before pointer is returned */
218:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
219:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
220:     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
221:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
222:   }
223:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
224:   VecRestoreArray(Y,(PetscScalar **)&y);
225:   return(0);
226: }

/* Z = Y + A*X.  Y is first copied into Z (unless they alias), then Gemv
   accumulates A*X into Z with beta = 1. */
230: static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
231: {
232:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
233:   PetscErrorCode        ierr;
234:   const PetscElemScalar *x;
235:   PetscElemScalar       *z;
236:   PetscElemScalar       one = 1;
 
239:   if (Y != Z) {VecCopy(Y,Z);}
240:   VecGetArrayRead(X,(const PetscScalar **)&x);
241:   VecGetArray(Z,(PetscScalar **)&z);
242:   { /* Scoping so that constructor is called before pointer is returned */
243:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
244:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
245:     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
246:     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
247:   }
248:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
249:   VecRestoreArray(Z,(PetscScalar **)&z);
250:   return(0);
251: }

/* Z = Y + A^T*X.  Transpose analogue of MatMultAdd_Elemental. */
255: static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
256: {
257:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
258:   PetscErrorCode        ierr;
259:   const PetscElemScalar *x;
260:   PetscElemScalar       *z;
261:   PetscElemScalar       one = 1;
 
264:   if (Y != Z) {VecCopy(Y,Z);}
265:   VecGetArrayRead(X,(const PetscScalar **)&x);
266:   VecGetArray(Z,(PetscScalar **)&z);
267:   { /* Scoping so that constructor is called before pointer is returned */
268:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
269:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
270:     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
271:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
272:   }
273:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
274:   VecRestoreArray(Z,(PetscScalar **)&z);
275:   return(0);
276: }

/* Numeric phase of C = A*B via El::Gemm; C must already have the right
   global dimensions (set by the symbolic phase). */
280: static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
281: {
282:   Mat_Elemental    *a = (Mat_Elemental*)A->data;
283:   Mat_Elemental    *b = (Mat_Elemental*)B->data;
284:   Mat_Elemental    *c = (Mat_Elemental*)C->data;
285:   PetscElemScalar  one = 1,zero = 0;
 
288:   { /* Scoping so that constructor is called before pointer is returned */
289:     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
290:   }
291:   C->assembled = PETSC_TRUE;
292:   return(0);
293: }

/* Symbolic phase of C = A*B: creates an A->rmap x B->cmap MATELEMENTAL
   matrix.  The fill argument is unused (dense storage). */
297: static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
298: {
300:   Mat            Ce;
301:   MPI_Comm       comm;
 
304:   PetscObjectGetComm((PetscObject)A,&comm);
305:   MatCreate(comm,&Ce);
306:   MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
307:   MatSetType(Ce,MATELEMENTAL);
308:   MatSetUp(Ce);
309:   *C = Ce;
310:   return(0);
311: }

/* C = A*B driver: runs the symbolic phase only for MAT_INITIAL_MATRIX, then
   always the numeric phase.  The caller's fill hint is ignored (1.0 passed). */
315: static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
316: {
 
320:   if (scall == MAT_INITIAL_MATRIX){
321:     PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
322:     MatMatMultSymbolic_Elemental(A,B,1.0,C);
323:     PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
324:   }
325:   PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
326:   MatMatMultNumeric_Elemental(A,B,*C);
327:   PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
328:   return(0);
329: }

/* Numeric phase of C = A*B^T via El::Gemm with orientation (NORMAL,TRANSPOSE). */
333: static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
334: {
335:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
336:   Mat_Elemental      *b = (Mat_Elemental*)B->data;
337:   Mat_Elemental      *c = (Mat_Elemental*)C->data;
338:   PetscElemScalar    one = 1,zero = 0;
 
341:   { /* Scoping so that constructor is called before pointer is returned */
342:     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
343:   }
344:   C->assembled = PETSC_TRUE;
345:   return(0);
346: }

/* Symbolic phase of C = A*B^T: creates an A->rmap x B->rmap MATELEMENTAL
   matrix (note B's ROW map, since B is transposed).  fill is unused. */
350: static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
351: {
353:   Mat            Ce;
354:   MPI_Comm       comm;
 
357:   PetscObjectGetComm((PetscObject)A,&comm);
358:   MatCreate(comm,&Ce);
359:   MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
360:   MatSetType(Ce,MATELEMENTAL);
361:   MatSetUp(Ce);
362:   *C = Ce;
363:   return(0);
364: }

/* C = A*B^T driver: runs the symbolic phase only for MAT_INITIAL_MATRIX,
   then always the numeric phase.  The caller's fill hint is ignored. */
368: static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
369: {
 
373:   if (scall == MAT_INITIAL_MATRIX){
374:     PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);
       /* Bug fix: previously called MatMatMultSymbolic_Elemental(), which sizes
          C as A->rmap x B->cmap; for C = A*B^T the correct size is
          A->rmap x B->rmap, as produced by the *Transpose* symbolic routine. */
375:     MatMatTransposeMultSymbolic_Elemental(A,B,1.0,C);
376:     PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);
377:   }
378:   PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);
379:   MatMatTransposeMultNumeric_Elemental(A,B,*C);
380:   PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);
381:   return(0);
382: }

/* Extracts the diagonal of A into D, one entry at a time: each PETSc index i
   is translated to Elemental (row,col) indices and fetched with the collective
   El::DistMatrix::Get(), so this is O(min(m,n)) global communications. */
386: static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
387: {
388:   PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
389:   Mat_Elemental   *a = (Mat_Elemental*)A->data;
390:   PetscErrorCode  ierr;
391:   PetscElemScalar v;
392:   MPI_Comm        comm;
 
395:   PetscObjectGetComm((PetscObject)A,&comm);
396:   MatGetSize(A,&nrows,&ncols);
397:   nD = nrows>ncols ? ncols : nrows;
398:   for (i=0; i<nD; i++) {
399:     PetscInt erow,ecol;
400:     P2RO(A,0,i,&rrank,&ridx);
401:     RO2E(A,0,rrank,ridx,&erow);
402:     if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
403:     P2RO(A,1,i,&crank,&cidx);
404:     RO2E(A,1,crank,cidx,&ecol);
405:     if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
406:     v = a->emat->Get(erow,ecol);
407:     VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
408:   }
409:   VecAssemblyBegin(D);
410:   VecAssemblyEnd(D);
411:   return(0);
412: }

/* X = diag(L)*X*diag(R).  Either vector may be NULL; each one is attached
   read-only to a [VC,STAR] Elemental matrix and applied via DiagonalScale. */
416: static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
417: {
418:   Mat_Elemental         *x = (Mat_Elemental*)X->data;
419:   const PetscElemScalar *d;
420:   PetscErrorCode        ierr;
 
423:   if (R) {
424:     VecGetArrayRead(R,(const PetscScalar **)&d);
425:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
426:     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
427:     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
428:     VecRestoreArrayRead(R,(const PetscScalar **)&d);
429:   }
430:   if (L) {
431:     VecGetArrayRead(L,(const PetscScalar **)&d);
432:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
433:     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
434:     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
435:     VecRestoreArrayRead(L,(const PetscScalar **)&d);
436:   }
437:   return(0);
438: }

/* X = a*X via El::Scale on the underlying distributed matrix. */
442: static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
443: {
444:   Mat_Elemental  *x = (Mat_Elemental*)X->data;
 
447:   El::Scale((PetscElemScalar)a,*x->emat);
448:   return(0);
449: }

451: /*
452:   MatAXPY - Computes Y = a*X + Y.
453: */
/* The MatStructure argument is ignored: both operands are dense Elemental
   matrices, so El::Axpy is applied directly. */
456: static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
457: {
458:   Mat_Elemental  *x = (Mat_Elemental*)X->data;
459:   Mat_Elemental  *y = (Mat_Elemental*)Y->data;
 
463:   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
       /* bump the object state so cached norms etc. are invalidated */
464:   PetscObjectStateIncrease((PetscObject)Y);
465:   return(0);
466: }

/* B = A (deep copy of the Elemental storage); MatStructure is ignored. */
470: static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
471: {
472:   Mat_Elemental *a=(Mat_Elemental*)A->data;
473:   Mat_Elemental *b=(Mat_Elemental*)B->data;
 
476:   El::Copy(*a->emat,*b->emat);
477:   return(0);
478: }

/* Creates a new MATELEMENTAL matrix with A's layout; values are copied only
   for MAT_COPY_VALUES.  The duplicate is marked assembled either way. */
482: static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
483: {
484:   Mat            Be;
485:   MPI_Comm       comm;
486:   Mat_Elemental  *a=(Mat_Elemental*)A->data;
 
490:   PetscObjectGetComm((PetscObject)A,&comm);
491:   MatCreate(comm,&Be);
492:   MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
493:   MatSetType(Be,MATELEMENTAL);
494:   MatSetUp(Be);
495:   *B = Be;
496:   if (op == MAT_COPY_VALUES) {
497:     Mat_Elemental *b=(Mat_Elemental*)Be->data;
498:     El::Copy(*a->emat,*b->emat);
499:   }
500:   Be->assembled = PETSC_TRUE;
501:   return(0);
502: }

/* B = A^T.  Only out-of-place transposition is supported: for
   MAT_INITIAL_MATRIX a new cmap x rmap MATELEMENTAL matrix is created,
   otherwise the existing *B is reused as the destination. */
506: static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
507: {
508:   Mat            Be = *B;
510:   MPI_Comm       comm;
511:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
 
514:   PetscObjectGetComm((PetscObject)A,&comm);
515:   /* Only out-of-place supported */
516:   if (reuse == MAT_INITIAL_MATRIX){
517:     MatCreate(comm,&Be);
518:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
519:     MatSetType(Be,MATELEMENTAL);
520:     MatSetUp(Be);
521:     *B = Be;
522:   }
523:   b = (Mat_Elemental*)Be->data;
524:   El::Transpose(*a->emat,*b->emat);
525:   Be->assembled = PETSC_TRUE;
526:   return(0);
527: }

/* A = conj(A), element-wise, via El::Conjugate. */
531: static PetscErrorCode MatConjugate_Elemental(Mat A)
532: {
533:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
 
536:   El::Conjugate(*a->emat);
537:   return(0);
538: }

/* B = A^H (conjugate transpose) via El::Adjoint.  Mirrors
   MatTranspose_Elemental; only out-of-place operation is supported. */
542: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
543: {
544:   Mat            Be = *B;
546:   MPI_Comm       comm;
547:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
 
550:   PetscObjectGetComm((PetscObject)A,&comm);
551:   /* Only out-of-place supported */
552:   if (reuse == MAT_INITIAL_MATRIX){
553:     MatCreate(comm,&Be);
554:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
555:     MatSetType(Be,MATELEMENTAL);
556:     MatSetUp(Be);
557:     *B = Be;
558:   }
559:   b = (Mat_Elemental*)Be->data;
560:   El::Adjoint(*a->emat,*b->emat);
561:   Be->assembled = PETSC_TRUE;
562:   return(0);
563: }

/* Solves A*X = B given a prior LU or Cholesky factorization stored in a->emat.
   B is copied into X, the X array is attached as a [VC,STAR] vector, then
   redistributed to [MC,MR] for Elemental's SolveAfter routines and copied back.
   LU uses the stored pivots when present (pivoted factorization). */
567: static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
568: {
569:   Mat_Elemental     *a = (Mat_Elemental*)A->data;
570:   PetscErrorCode    ierr;
571:   PetscElemScalar   *x;
 
574:   VecCopy(B,X);
575:   VecGetArray(X,(PetscScalar **)&x);
576:   El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
577:   xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
578:   El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
579:   switch (A->factortype) {
580:   case MAT_FACTOR_LU:
581:     if ((*a->pivot).AllocatedMemory()) {
582:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,xer);
583:       El::Copy(xer,xe);
584:     } else {
585:       El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
586:       El::Copy(xer,xe);
587:     }
588:     break;
589:   case MAT_FACTOR_CHOLESKY:
590:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
591:     El::Copy(xer,xe);
592:     break;
593:   default:
594:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
595:     break;
596:   }
597:   VecRestoreArray(X,(PetscScalar **)&x);
598:   return(0);
599: }

/* X = Y + inv(A)*B, implemented as MatSolve followed by VecAXPY. */
603: static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
604: {
605:   PetscErrorCode    ierr;
 
608:   MatSolve_Elemental(A,B,X);
609:   VecAXPY(X,1,Y);
610:   return(0);
611: }

/* Solves A*X = B for a matrix of right-hand sides (all MATELEMENTAL):
   B is copied into X and Elemental's SolveAfter is applied in place,
   choosing pivoted/unpivoted LU or Cholesky from A->factortype. */
615: static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
616: {
617:   Mat_Elemental *a=(Mat_Elemental*)A->data;
618:   Mat_Elemental *b=(Mat_Elemental*)B->data;
619:   Mat_Elemental *x=(Mat_Elemental*)X->data;
 
622:   El::Copy(*b->emat,*x->emat);
623:   switch (A->factortype) {
624:   case MAT_FACTOR_LU:
625:     if ((*a->pivot).AllocatedMemory()) {
626:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,*x->emat);
627:     } else {
628:       El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
629:     }
630:     break;
631:   case MAT_FACTOR_CHOLESKY:
632:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
633:     break;
634:   default:
635:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
636:     break;
637:   }
638:   return(0);
639: }

/* In-place LU factorization of A.  A nonzero info->dtcol selects the pivoted
   variant (pivots stored in a->pivot); row/column IS arguments are ignored. */
643: static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
644: {
645:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
 
648:   if (info->dtcol){
649:     El::LU(*a->emat,*a->pivot);
650:   } else {
651:     El::LU(*a->emat);
652:   }
653:   A->factortype = MAT_FACTOR_LU;
654:   A->assembled  = PETSC_TRUE;
655:   return(0);
656: }

/* Numeric LU: copies A into the factor matrix F, then factors F in place. */
660: static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
661: {
 
665:   MatCopy(A,F,SAME_NONZERO_PATTERN);
666:   MatLUFactor_Elemental(F,0,0,info);
667:   return(0);
668: }

/* No-op: dense storage needs no symbolic analysis. */
672: static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
673: {
675:   /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
676:   return(0);
677: }

/* In-place upper-triangular Cholesky factorization of A; the permutation IS
   and factor info are ignored.  (Local variable d is declared but unused.) */
681: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
682: {
683:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
684:   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
 
687:   El::Cholesky(El::UPPER,*a->emat);
688:   A->factortype = MAT_FACTOR_CHOLESKY;
689:   A->assembled  = PETSC_TRUE;
690:   return(0);
691: }

/* Numeric Cholesky: copies A into F, then factors F in place. */
695: static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
696: {
 
700:   MatCopy(A,F,SAME_NONZERO_PATTERN);
701:   MatCholeskyFactor_Elemental(F,0,info);
702:   return(0);
703: }

/* No-op: dense storage needs no symbolic analysis. */
707: static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
708: {
710:   /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
711:   return(0);
712: }

/* Reports the solver package name (MATSOLVERELEMENTAL) for a factored matrix. */
716: PetscErrorCode MatFactorGetSolverPackage_elemental_elemental(Mat A,const MatSolverPackage *type)
717: {
719:   *type = MATSOLVERELEMENTAL;
720:   return(0);
721: }

/* Creates the factor matrix F for MATSOLVERELEMENTAL: a MATELEMENTAL matrix
   with A's layout, tagged with the requested factor type. */
725: static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
726: {
727:   Mat            B;
 
731:   /* Create the factorization matrix */
732:   MatCreate(PetscObjectComm((PetscObject)A),&B);
733:   MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
734:   MatSetType(B,MATELEMENTAL);
735:   MatSetUp(B);
736:   B->factortype = ftype;
737:   PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_elemental_elemental);
738:   *F            = B;
739:   return(0);
740: }

/* Registers the Elemental LU and Cholesky factorizations with PETSc's
   solver-package registry. */
744: PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Elemental(void)
745: {
 
749:   MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
750:   MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
751:   return(0);
752: }

/* Computes the 1-, Frobenius-, or infinity-norm of A via the corresponding
   Elemental routine.  Any other NormType is an error. */
756: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
757: {
758:   Mat_Elemental *a=(Mat_Elemental*)A->data;
 
761:   switch (type){
762:   case NORM_1:
763:     *nrm = El::OneNorm(*a->emat);
764:     break;
765:   case NORM_FROBENIUS:
766:     *nrm = El::FrobeniusNorm(*a->emat);
767:     break;
768:   case NORM_INFINITY:
769:     *nrm = El::InfinityNorm(*a->emat);
770:     break;
771:   default:
       /* Bug fix: previously printf'ed a message and fell through, returning
          success with *nrm left uninitialized.  Raise a proper PETSc error
          instead, consistent with the rest of this file. */
772:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
773:   }
774:   return(0);
775: }

/* Sets every entry of A to zero via El::Zero (storage is retained). */
779: static PetscErrorCode MatZeroEntries_Elemental(Mat A)
780: {
781:   Mat_Elemental *a=(Mat_Elemental*)A->data;
 
784:   El::Zero(*a->emat);
785:   return(0);
786: }

/* Builds index sets of the PETSc (global) row and/or column indices owned
   locally under Elemental's cyclic distribution: each local slot i maps to
   global Elemental index shift+i*stride, which is translated back to a PETSc
   index via E2RO/RO2P.  Either output argument may be NULL.  The malloc'd
   index arrays are handed off to the IS with PETSC_OWN_POINTER. */
790: static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
791: {
792:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
794:   PetscInt       i,m,shift,stride,*idx;
 
797:   if (rows) {
798:     m = a->emat->LocalHeight();
799:     shift = a->emat->ColShift();
800:     stride = a->emat->ColStride();
801:     PetscMalloc1(m,&idx);
802:     for (i=0; i<m; i++) {
803:       PetscInt rank,offset;
804:       E2RO(A,0,shift+i*stride,&rank,&offset);
805:       RO2P(A,0,rank,offset,&idx[i]);
806:     }
807:     ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
808:   }
809:   if (cols) {
810:     m = a->emat->LocalWidth();
811:     shift = a->emat->RowShift();
812:     stride = a->emat->RowStride();
813:     PetscMalloc1(m,&idx);
814:     for (i=0; i<m; i++) {
815:       PetscInt rank,offset;
816:       E2RO(A,1,shift+i*stride,&rank,&offset);
817:       RO2P(A,1,rank,offset,&idx[i]);
818:     }
819:     ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
820:   }
821:   return(0);
822: }

/* Converts a MATELEMENTAL matrix to MATDENSE/MATSEQDENSE/MATMPIDENSE by
   fetching every global entry with the collective El::DistMatrix::Get()
   (O(M*N) communications -- fine for viewing/debugging, not for large runs).
   For MAT_REUSE_MATRIX the dense result replaces A's header in place. */
826: static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
827: {
828:   Mat                Bmpi;
829:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
830:   MPI_Comm           comm;
831:   PetscErrorCode     ierr;
832:   PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j;
833:   PetscElemScalar    v;
834:   PetscBool          s1,s2,s3;
 
837:   PetscObjectGetComm((PetscObject)A,&comm);
838:   PetscStrcmp(newtype,MATDENSE,&s1);
839:   PetscStrcmp(newtype,MATSEQDENSE,&s2);
840:   PetscStrcmp(newtype,MATMPIDENSE,&s3);
841:   if (!s1 && !s2 && !s3) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported New MatType: must be MATDENSE, MATSEQDENSE or MATMPIDENSE");
842:   MatCreate(comm,&Bmpi);
843:   MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
844:   MatSetType(Bmpi,MATDENSE);
845:   MatSetUp(Bmpi);
846:   MatGetSize(A,&nrows,&ncols);
847:   for (i=0; i<nrows; i++) {
848:     PetscInt erow,ecol;
849:     P2RO(A,0,i,&rrank,&ridx);
850:     RO2E(A,0,rrank,ridx,&erow);
851:     if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
852:     for (j=0; j<ncols; j++) {
853:       P2RO(A,1,j,&crank,&cidx);
854:       RO2E(A,1,crank,cidx,&ecol);
855:       if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
856:       v = a->emat->Get(erow,ecol);
857:       MatSetValues(Bmpi,1,&i,1,&j,(PetscScalar *)&v,INSERT_VALUES);
858:     }
859:   }
860:   MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
861:   MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
862:   if (reuse == MAT_REUSE_MATRIX) {
863:     MatHeaderReplace(A,Bmpi);
864:   } else {
865:     *B = Bmpi;
866:   }
867:   return(0);
868: }

/* Converts a sequential AIJ matrix to MATELEMENTAL by inserting it row by row
   with ADD_VALUES (required by the Elemental off-process Axpy interface). */
872: PETSC_EXTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
873: {
874:   Mat               mat_elemental;
875:   PetscErrorCode    ierr;
876:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
877:   const PetscInt    *cols;
878:   const PetscScalar *vals;
 
881:   MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
882:   MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
883:   MatSetType(mat_elemental,MATELEMENTAL);
884:   MatSetUp(mat_elemental);
885:   for (row=0; row<M; row++) {
886:     MatGetRow(A,row,&ncols,&cols,&vals);
887:     /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
888:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
889:     MatRestoreRow(A,row,&ncols,&cols,&vals);
890:   }
891:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
892:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
 
894:   if (reuse == MAT_REUSE_MATRIX) {
895:     MatHeaderReplace(A,mat_elemental);
896:   } else {
897:     *newmat = mat_elemental;
898:   }
899:   return(0);
900: }

/* Converts a parallel AIJ matrix to MATELEMENTAL.  Each rank inserts its
   locally owned rows [rstart,rend), one entry at a time with ADD_VALUES. */
904: PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
905: {
906:   Mat               mat_elemental;
907:   PetscErrorCode    ierr;
908:   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
909:   const PetscInt    *cols;
910:   const PetscScalar *vals;
 
913:   MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
914:   MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
915:   MatSetType(mat_elemental,MATELEMENTAL);
916:   MatSetUp(mat_elemental);
917:   for (row=rstart; row<rend; row++) {
918:     MatGetRow(A,row,&ncols,&cols,&vals);
919:     for (j=0; j<ncols; j++) {
920:       /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
921:       MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
922:     }
923:     MatRestoreRow(A,row,&ncols,&cols,&vals);
924:   }
925:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
926:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
 
928:   if (reuse == MAT_REUSE_MATRIX) {
929:     MatHeaderReplace(A,mat_elemental);
930:   } else {
931:     *newmat = mat_elemental;
932:   }
933:   return(0);
934: }

/* Converts a sequential symmetric (SBAIJ) matrix to MATELEMENTAL.  SBAIJ
   stores only the upper triangle, so each off-diagonal entry (row,col) is
   also mirrored to (col,row) to produce the full dense matrix. */
938: PETSC_EXTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
939: {
940:   Mat               mat_elemental;
941:   PetscErrorCode    ierr;
942:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
943:   const PetscInt    *cols;
944:   const PetscScalar *vals;
 
947:   MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
948:   MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
949:   MatSetType(mat_elemental,MATELEMENTAL);
950:   MatSetUp(mat_elemental);
951:   MatGetRowUpperTriangular(A);
952:   for (row=0; row<M; row++) {
953:     MatGetRow(A,row,&ncols,&cols,&vals);
954:     /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
955:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
956:     for (j=0; j<ncols; j++) { /* lower triangular part */
957:       if (cols[j] == row) continue;
958:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
959:     }
960:     MatRestoreRow(A,row,&ncols,&cols,&vals);
961:   }
962:   MatRestoreRowUpperTriangular(A);
963:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
964:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
 
966:   if (reuse == MAT_REUSE_MATRIX) {
967:     MatHeaderReplace(A,mat_elemental);
968:   } else {
969:     *newmat = mat_elemental;
970:   }
971:   return(0);
972: }

/* Converts a parallel symmetric (SBAIJ) matrix to MATELEMENTAL.  Each rank
   inserts its locally owned rows; off-diagonal entries are mirrored to the
   lower triangle, which ADD_VALUES routes to the owning process. */
976: PETSC_EXTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
977: {
978:   Mat               mat_elemental;
979:   PetscErrorCode    ierr;
980:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
981:   const PetscInt    *cols;
982:   const PetscScalar *vals;
 
985:   MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
986:   MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
987:   MatSetType(mat_elemental,MATELEMENTAL);
988:   MatSetUp(mat_elemental);
989:   MatGetRowUpperTriangular(A);
990:   for (row=rstart; row<rend; row++) {
991:     MatGetRow(A,row,&ncols,&cols,&vals);
992:     /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
993:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
994:     for (j=0; j<ncols; j++) { /* lower triangular part */
995:       if (cols[j] == row) continue;
996:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
997:     }
998:     MatRestoreRow(A,row,&ncols,&cols,&vals);
999:   }
1000:   MatRestoreRowUpperTriangular(A);
1001:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1002:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
 
1004:   if (reuse == MAT_REUSE_MATRIX) {
1005:     MatHeaderReplace(A,mat_elemental);
1006:   } else {
1007:     *newmat = mat_elemental;
1008:   }
1009:   return(0);
1010: }

1014: static PetscErrorCode MatDestroy_Elemental(Mat A)
1015: {
1016:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1017:   PetscErrorCode     ierr;
1018:   Mat_Elemental_Grid *commgrid;
1019:   PetscBool          flg;
1020:   MPI_Comm           icomm;

1023:   a->interface->Detach();
1024:   delete a->interface;
1025:   delete a->esubmat;
1026:   delete a->emat;
1027:   delete a->pivot;

1029:   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1030:   PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1031:   MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1032:   /* printf("commgrid->grid_refct = %d, grid=%p\n",commgrid->grid_refct,commgrid->grid); -- memory leak revealed by valgrind? */
1033:   if (--commgrid->grid_refct == 0) {
1034:     delete commgrid->grid;
1035:     PetscFree(commgrid);
1036:   }
1037:   PetscCommDestroy(&icomm);
1038:   PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
1039:   PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);
1040:   PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",NULL);
1041:   PetscFree(A->data);
1042:   return(0);
1043: }

/*
   MatSetUp_Elemental - Finalize the PETSc layouts, size and zero the underlying
   El::DistMatrix, and record the parameters of the cyclic ownership map.
*/
PetscErrorCode MatSetUp_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscMPIInt    rsize,csize;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* the actual storage is Elemental's distributed matrix: resize it and clear it */
  a->emat->Resize(A->rmap->N,A->cmap->N);
  El::Zero(*a->emat);

  MPI_Comm_size(A->rmap->comm,&rsize);
  MPI_Comm_size(A->cmap->comm,&csize);
  if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
  a->commsize = rsize;
  /* mr[k] = N % size, mapped to size when the division is exact; m[k] then equals
     ceil(N/size).  These presumably feed the PETSc<->Elemental ordering maps defined
     elsewhere in this file -- not visible in this chunk, confirm against those maps. */
  a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
  a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
  a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
  a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
  return(0);
}

/*
   MatAssemblyBegin_Elemental - Flush the entries queued by MatSetValues into the
   distributed Elemental matrix.

   Detach() completes the outstanding communication of queued contributions
   (El::AxpyInterface semantics); the interface is then re-attached so later
   MatSetValues calls continue to work.
*/
PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  a->interface->Detach();
  a->interface->Attach(El::LOCAL_TO_GLOBAL,*(a->emat));
  return(0);
}

/*
   MatAssemblyEnd_Elemental - Intentional no-op; all of the assembly communication
   is performed in MatAssemblyBegin_Elemental().
*/
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  /* Currently does nothing */
  return(0);
}

1094: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1095: {
1097:   Mat            Adense,Ae;
1098:   MPI_Comm       comm;

1101:   PetscObjectGetComm((PetscObject)newMat,&comm);
1102:   MatCreate(comm,&Adense);
1103:   MatSetType(Adense,MATDENSE);
1104:   MatLoad(Adense,viewer);
1105:   MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1106:   MatDestroy(&Adense);
1107:   MatHeaderReplace(newMat,Ae);
1108:   return(0);
1109: }

/*
   MatElementalHermitianGenDefEig_Elemental - Solve the Hermitian-definite
   generalized eigenproblem for the pencil (A,B) via El::HermitianGenDefEig and
   wrap the results in two freshly created MATELEMENTAL matrices.

   Output: *evals receives the eigenvalues, *evec the eigenvectors; the caller
   owns both.  The eigenpair subset/sorting are controlled by the Elemental
   arguments (sort, subset, ctrl).
*/
PetscErrorCode MatElementalHermitianGenDefEig_Elemental(El::Pencil eigtype,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  Mat_Elemental  *a=(Mat_Elemental*)A->data,*b=(Mat_Elemental*)B->data,*x;
  MPI_Comm       comm;
  Mat            EVAL;
  Mat_Elemental  *e;

  /* Compute eigenvalues and eigenvectors */
  El::DistMatrix<PetscElemScalar,El::VR,El::STAR> w( *a->grid ); /* holding eigenvalues */
  El::DistMatrix<PetscElemScalar>                 X( *a->grid ); /* holding eigenvectors */
  El::HermitianGenDefEig(eigtype,uplo,*a->emat,*b->emat,w,X,sort,subset,ctrl);
  /* El::Print(w, "Eigenvalues"); */

  /* Wrap w and X into PETSc's MATMATELEMENTAL matrices */
  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,evec);
  MatSetSizes(*evec,PETSC_DECIDE,PETSC_DECIDE,X.Height(),X.Width());
  MatSetType(*evec,MATELEMENTAL);
  MatSetFromOptions(*evec);
  MatSetUp(*evec);
  /* empty assembly puts the new Mat in a valid assembled state before its data is overwritten */
  MatAssemblyBegin(*evec,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*evec,MAT_FINAL_ASSEMBLY);

  x = (Mat_Elemental*)(*evec)->data;
  //delete x->emat; //-- memory leak???
  *x->emat = X; /* copy-assign X into the wrapper's storage */

  MatCreate(comm,&EVAL);
  MatSetSizes(EVAL,PETSC_DECIDE,PETSC_DECIDE,w.Height(),w.Width());
  MatSetType(EVAL,MATELEMENTAL);
  MatSetFromOptions(EVAL);
  MatSetUp(EVAL);
  MatAssemblyBegin(EVAL,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(EVAL,MAT_FINAL_ASSEMBLY);
  e         = (Mat_Elemental*)EVAL->data;
  /* w is (VR,STAR) while e->emat is the default distribution; assignment redistributes */
  *e->emat = w; //-- memory leak???
  *evals   = EVAL;

#if defined(MV)
  /* Test correctness norm = || - A*X + B*X*w || */
  {
    PetscElemScalar alpha,beta;
    El::DistMatrix<PetscElemScalar> Y(*a->grid); //tmp matrix
    alpha = 1.0; beta=0.0;
    El::Gemm(El::NORMAL,El::NORMAL,alpha,*b->emat,X,beta,Y); //Y = B*X
    El::DiagonalScale(El::RIGHT,El::NORMAL, w, Y); //Y = Y*w
    alpha = -1.0; beta=1.0;
    El::Gemm(El::NORMAL,El::NORMAL,alpha,*a->emat,X,beta,Y); //Y = - A*X + B*X*w

    PetscElemScalar norm = El::FrobeniusNorm(Y);
    if ((*a->grid).Rank()==0) printf("  norm (- A*X + B*X*w) = %g\n",norm);
  }

  {
    PetscMPIInt rank;
    MPI_Comm_rank(comm,&rank);
    printf("w: [%d] [%d, %d %d] %d; X: %d %d\n",rank,w.DistRank(),w.ColRank(),w.RowRank(),w.LocalHeight(),X.LocalHeight(),X.LocalWidth());
  }
#endif
  return(0);
}

/*@
  MatElementalHermitianGenDefEig - Compute the set of eigenvalues of the Hermitian-definite matrix pencil determined by the subset structure

   Logically Collective on Mat

   Input Parameters:
+  type   - Elemental pencil type (see El::Pencil)
.  uplo   - whether the lower or upper triangular parts of A and B are referenced
.  A      - matrix of type MATELEMENTAL defining the pencil (the method is composed on MATELEMENTAL in MatCreate_Elemental)
.  B      - second matrix of the Hermitian-definite pencil
.  sort   - Elemental eigenvalue sorting strategy (see El::SortType)
.  subset - Elemental structure selecting the subset of eigenpairs to compute
-  ctrl   - Elemental control structure for the Hermitian eigensolver

   Output Parameters:
+  evals  - new matrix holding the computed eigenvalues; caller must destroy it
-  evec   - new matrix holding the corresponding eigenvectors; caller must destroy it

   Level: beginner

   References: Elemental Users' Guide
@*/
PetscErrorCode MatElementalHermitianGenDefEig(El::Pencil type,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{

  PetscTryMethod(A,"MatElementalHermitianGenDefEig_C",(El::Pencil,El::UpperOrLower,Mat,Mat,Mat*,Mat*,El::SortType,El::HermitianEigSubset<PetscElemScalar>,const El::HermitianEigCtrl<PetscElemScalar>),(type,uplo,A,B,evals,evec,sort,subset,ctrl));
  return(0);
}

1197: /* -------------------------------------------------------------------*/
/*
   Function dispatch table for MATELEMENTAL.  The entries are POSITIONAL: the
   /*NN* / markers give the slot index in struct _MatOps, and a 0 means the
   operation is not implemented for this type.  Do not reorder entries.
*/
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       0,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_Elemental,
       MatMatMultSymbolic_Elemental,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       MatMatTransposeMult_Elemental,
       MatMatTransposeMultSymbolic_Elemental,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ 0,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       0,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0
};

1340: /*MC
1341:    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

1343:   Use ./configure --download-elemental to install PETSc to use Elemental

1345:  Use -pc_type lu -pc_factor_mat_solver_package elemental to use this direct solver

1347:    Options Database Keys:
1348: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1349: - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix

1351:   Level: beginner

1353: .seealso: MATDENSE
1354: M*/

/*
   MatCreate_Elemental - Constructor for a MATELEMENTAL matrix.

   Installs the MATELEMENTAL operation table, allocates the per-matrix Elemental
   objects, and obtains the shared El::Grid for this communicator (creating and
   caching it on first use via an MPI attribute on the PETSc inner communicator).
*/
PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscErrorCode     ierr;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscElementalInitializePackage(); /* makes sure El::Initialize() has been called */
  PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
  A->insertmode = NOT_SET_VALUES;

  PetscNewLog(A,&a);
  A->data = (void*)a;

  /* Set up the elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    /* first MATELEMENTAL matrix ever: create the (process-global) attribute key */
    MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
  }
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (!flg) {
    /* no grid cached on this communicator yet: build one and attach it, with refcount 1 */
    PetscNewLog(A,&commgrid);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
    /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
    PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
    if (flg1) {
      if (El::mpi::Size(cxxcomm) % optv1 != 0) {
        SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      }
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
      /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
    }
    commgrid->grid_refct = 1;
    MPI_Attr_put(icomm,Petsc_Elemental_keyval,(void*)commgrid);
    PetscOptionsEnd();
  } else {
    /* reuse the cached grid; MatDestroy_Elemental decrements this */
    commgrid->grid_refct++;
  }
  PetscCommDestroy(&icomm);
  a->grid      = commgrid->grid;
  a->emat      = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->esubmat   = new El::Matrix<PetscElemScalar>(1,1);
  a->interface = new El::AxpyInterface<PetscElemScalar>;
  a->pivot     = new El::DistMatrix<PetscInt,El::VC,El::STAR>;

  /* build cache for off array entries formed */
  a->interface->Attach(El::LOCAL_TO_GLOBAL,*(a->emat));

  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
  PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",MatElementalHermitianGenDefEig_Elemental);

  PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
  return(0);
}