Actual source code: matelem.cxx

  1: #include <petsc/private/petscelemental.h>

  3: const char ElementalCitation[] = "@Article{Elemental2012,\n"
  4: "  author  = {Jack Poulson and Bryan Marker and Jeff R. Hammond and Nichols A. Romero and Robert {v}an~{d}e~{G}eijn},\n"
  5: "  title   = {Elemental: A New Framework for Distributed Memory Dense Matrix Computations},\n"
  6: "  journal = {{ACM} Transactions on Mathematical Software},\n"
  7: "  volume  = {39},\n"
  8: "  number  = {2},\n"
  9: "  year    = {2013}\n"
 10: "}\n";
 11: static PetscBool ElementalCite = PETSC_FALSE;

 13: /*
 14:     Petsc_Elemental_keyval identifies an MPI attribute attached to a communicator; here the
 15:   cached attribute is a Mat_Elemental_Grid, so all Elemental matrices on the communicator share one process grid.
 16: */
 17: static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
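
/*
   A minimal sketch of the attribute-caching pattern used by MatCreate_Elemental() and
   MatDestroy_Elemental() below (error checking omitted; comm is any PETSc communicator):

     Mat_Elemental_Grid *commgrid;
     PetscBool          flg;
     if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
       MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
     }
     MPI_Comm_get_attr(comm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
     if (!flg) {
       ... allocate commgrid, create the El::Grid, set commgrid->grid_refct = 1 ...
       MPI_Comm_set_attr(comm,Petsc_Elemental_keyval,(void*)commgrid);
     } else commgrid->grid_refct++;
*/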

 19: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
 20: {
 21:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
 22:   PetscBool      iascii;

 24:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 25:   if (iascii) {
 26:     PetscViewerFormat format;
 27:     PetscViewerGetFormat(viewer,&format);
 28:     if (format == PETSC_VIEWER_ASCII_INFO) {
 29:       /* call elemental viewing function */
 30:       PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
 31:       PetscViewerASCIIPrintf(viewer,"  allocated entries=%zu\n",(*a->emat).AllocatedMemory());
 32:       PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
 33:       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { /* unreachable: format equals PETSC_VIEWER_ASCII_INFO in this branch */
 34:         /* call elemental viewing function */
 35:         PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
 36:       }

 38:     } else if (format == PETSC_VIEWER_DEFAULT) {
 39:       PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
 40:       El::Print( *a->emat, "Elemental matrix (cyclic ordering)");
 41:       PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
 42:       if (A->factortype == MAT_FACTOR_NONE) {
 43:         Mat Adense;
 44:         MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 45:         MatView(Adense,viewer);
 46:         MatDestroy(&Adense);
 47:       }
 48:     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Viewer format not supported");
 49:   } else {
 50:     /* convert to dense format and call MatView() */
 51:     Mat Adense;
 52:     MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 53:     MatView(Adense,viewer);
 54:     MatDestroy(&Adense);
 55:   }
 56:   return 0;
 57: }

 59: static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
 60: {
 61:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

 63:   info->block_size = 1.0;

 65:   if (flag == MAT_LOCAL) {
 66:     info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
 67:     info->nz_used        = info->nz_allocated;
 68:   } else if (flag == MAT_GLOBAL_MAX) {
 69:     //MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));
 70:     /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
 71:     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
 72:   } else if (flag == MAT_GLOBAL_SUM) {
 73:     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
 74:     info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
 75:     info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
 76:     //MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
 77:     //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
 78:   }

 80:   info->nz_unneeded       = 0.0;
 81:   info->assemblies        = A->num_ass;
 82:   info->mallocs           = 0;
 83:   info->memory            = ((PetscObject)A)->mem;
 84:   info->fill_ratio_given  = 0; /* determined by Elemental */
 85:   info->fill_ratio_needed = 0;
 86:   info->factor_mallocs    = 0;
 87:   return 0;
 88: }

 90: PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
 91: {
 92:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

 94:   switch (op) {
 95:   case MAT_NEW_NONZERO_LOCATIONS:
 96:   case MAT_NEW_NONZERO_LOCATION_ERR:
 97:   case MAT_NEW_NONZERO_ALLOCATION_ERR:
 98:   case MAT_SYMMETRIC:
 99:   case MAT_SORTED_FULL:
100:   case MAT_HERMITIAN:
101:     break;
102:   case MAT_ROW_ORIENTED:
103:     a->roworiented = flg;
104:     break;
105:   default:
106:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
107:   }
108:   return 0;
109: }

111: static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
112: {
113:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
114:   PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

116:   // TODO: Initialize matrix to all zeros?

118:   // Count the number of queues from this process
119:   if (a->roworiented) {
120:     for (i=0; i<nr; i++) {
121:       if (rows[i] < 0) continue;
122:       P2RO(A,0,rows[i],&rrank,&ridx);
123:       RO2E(A,0,rrank,ridx,&erow);
125:       for (j=0; j<nc; j++) {
126:         if (cols[j] < 0) continue;
127:         P2RO(A,1,cols[j],&crank,&cidx);
128:         RO2E(A,1,crank,cidx,&ecol);
130:         if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
131:           /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
133:           ++numQueues;
134:           continue;
135:         }
136:         /* printf("Locally updating (%d,%d)\n",erow,ecol); */
137:         switch (imode) {
138:         case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
139:         case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
140:         default: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
141:         }
142:       }
143:     }

145:     /* printf("numQueues=%d\n",numQueues); */
 146:     a->emat->Reserve(numQueues);
147:     for (i=0; i<nr; i++) {
148:       if (rows[i] < 0) continue;
149:       P2RO(A,0,rows[i],&rrank,&ridx);
150:       RO2E(A,0,rrank,ridx,&erow);
151:       for (j=0; j<nc; j++) {
152:         if (cols[j] < 0) continue;
153:         P2RO(A,1,cols[j],&crank,&cidx);
154:         RO2E(A,1,crank,cidx,&ecol);
155:         if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
 156:           /* printf("Queueing remote update of (%d,%d)\n",erow,ecol); */
 157:           a->emat->QueueUpdate(erow,ecol,vals[i*nc+j]); /* off-process entries are applied additively, regardless of imode */
158:         }
159:       }
160:     }
 161:   } else { /* column-oriented */
162:     for (j=0; j<nc; j++) {
163:       if (cols[j] < 0) continue;
164:       P2RO(A,1,cols[j],&crank,&cidx);
165:       RO2E(A,1,crank,cidx,&ecol);
167:       for (i=0; i<nr; i++) {
168:         if (rows[i] < 0) continue;
169:         P2RO(A,0,rows[i],&rrank,&ridx);
170:         RO2E(A,0,rrank,ridx,&erow);
172:         if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
173:           /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
175:           ++numQueues;
176:           continue;
177:         }
178:         /* printf("Locally updating (%d,%d)\n",erow,ecol); */
179:         switch (imode) {
180:         case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
181:         case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
182:         default: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
183:         }
184:       }
185:     }

187:     /* printf("numQueues=%d\n",numQueues); */
 188:     a->emat->Reserve(numQueues);
189:     for (j=0; j<nc; j++) {
190:       if (cols[j] < 0) continue;
191:       P2RO(A,1,cols[j],&crank,&cidx);
192:       RO2E(A,1,crank,cidx,&ecol);

194:       for (i=0; i<nr; i++) {
195:         if (rows[i] < 0) continue;
196:         P2RO(A,0,rows[i],&rrank,&ridx);
197:         RO2E(A,0,rrank,ridx,&erow);
198:         if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
 199:           /* printf("Queueing remote update of (%d,%d)\n",erow,ecol); */
 200:           a->emat->QueueUpdate(erow,ecol,vals[i+j*nr]);
201:         }
202:       }
203:     }
204:   }
205:   return 0;
206: }
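
/*
   A hedged usage sketch for the routine above; A is assumed to be a MATELEMENTAL matrix
   that has already been through MatSetUp():

     PetscInt    row = 0,col = 1;
     PetscScalar v   = 2.0;
     MatSetValues(A,1,&row,1,&col,&v,ADD_VALUES);
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);   // ProcessQueues() flushes the queued updates
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

   Off-process entries always go through QueueUpdate(), i.e. they are applied additively,
   so INSERT_VALUES is only reliable for locally owned entries.
*/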

208: static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
209: {
210:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
211:   const PetscElemScalar *x;
212:   PetscElemScalar       *y;
213:   PetscElemScalar       one = 1,zero = 0;

215:   VecGetArrayRead(X,(const PetscScalar **)&x);
216:   VecGetArray(Y,(PetscScalar **)&y);
 217:   { /* scope so the Elemental views are destroyed before the arrays are restored */
218:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
219:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
220:     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
221:     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
222:   }
223:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
224:   VecRestoreArray(Y,(PetscScalar **)&y);
225:   return 0;
226: }
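
/*
   MatMult_Elemental() above (and the transpose/add variants below) avoid copies by wrapping
   the raw PETSc vector arrays in [VC,STAR]-distributed Elemental views; a condensed sketch
   of the pattern, where x and y come from VecGetArrayRead()/VecGetArray():

     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe,ye;
     xe.LockedAttach(N,1,grid,0,0,x,nlocal);    // read-only view of x
     ye.Attach(M,1,grid,0,0,y,mlocal);          // writable view of y
     El::Gemv(El::NORMAL,one,emat,xe,zero,ye);  // y = A*x computed in place

   The views must be destroyed before the arrays are restored, hence the enclosing braces.
*/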

228: static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
229: {
230:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
231:   const PetscElemScalar *x;
232:   PetscElemScalar       *y;
233:   PetscElemScalar       one = 1,zero = 0;

235:   VecGetArrayRead(X,(const PetscScalar **)&x);
236:   VecGetArray(Y,(PetscScalar **)&y);
 237:   { /* scope so the Elemental views are destroyed before the arrays are restored */
238:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
239:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
240:     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
241:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
242:   }
243:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
244:   VecRestoreArray(Y,(PetscScalar **)&y);
245:   return 0;
246: }

248: static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
249: {
250:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
251:   const PetscElemScalar *x;
252:   PetscElemScalar       *z;
253:   PetscElemScalar       one = 1;

255:   if (Y != Z) VecCopy(Y,Z);
256:   VecGetArrayRead(X,(const PetscScalar **)&x);
257:   VecGetArray(Z,(PetscScalar **)&z);
 258:   { /* scope so the Elemental views are destroyed before the arrays are restored */
259:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
260:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
261:     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
262:     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
263:   }
264:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
265:   VecRestoreArray(Z,(PetscScalar **)&z);
266:   return 0;
267: }

269: static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
270: {
271:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
272:   const PetscElemScalar *x;
273:   PetscElemScalar       *z;
274:   PetscElemScalar       one = 1;

276:   if (Y != Z) VecCopy(Y,Z);
277:   VecGetArrayRead(X,(const PetscScalar **)&x);
278:   VecGetArray(Z,(PetscScalar **)&z);
 279:   { /* scope so the Elemental views are destroyed before the arrays are restored */
280:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
281:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
282:     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
283:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
284:   }
285:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
286:   VecRestoreArray(Z,(PetscScalar **)&z);
287:   return 0;
288: }

290: PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
291: {
292:   Mat_Elemental    *a = (Mat_Elemental*)A->data;
293:   Mat_Elemental    *b = (Mat_Elemental*)B->data;
294:   Mat_Elemental    *c = (Mat_Elemental*)C->data;
295:   PetscElemScalar  one = 1,zero = 0;

 298:   El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
300:   C->assembled = PETSC_TRUE;
301:   return 0;
302: }

304: PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
305: {
306:   MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
307:   MatSetType(Ce,MATELEMENTAL);
308:   MatSetUp(Ce);
309:   Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
310:   return 0;
311: }

313: static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
314: {
315:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
316:   Mat_Elemental      *b = (Mat_Elemental*)B->data;
317:   Mat_Elemental      *c = (Mat_Elemental*)C->data;
318:   PetscElemScalar    one = 1,zero = 0;

 321:   El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
323:   C->assembled = PETSC_TRUE;
324:   return 0;
325: }

327: static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
328: {
329:   MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
330:   MatSetType(C,MATELEMENTAL);
331:   MatSetUp(C);
332:   return 0;
333: }

335: /* --------------------------------------- */
336: static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
337: {
338:   C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
339:   C->ops->productsymbolic = MatProductSymbolic_AB;
340:   return 0;
341: }

343: static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
344: {
345:   C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
346:   C->ops->productsymbolic          = MatProductSymbolic_ABt;
347:   return 0;
348: }

350: PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
351: {
352:   Mat_Product    *product = C->product;

354:   switch (product->type) {
355:   case MATPRODUCT_AB:
356:     MatProductSetFromOptions_Elemental_AB(C);
357:     break;
358:   case MATPRODUCT_ABt:
359:     MatProductSetFromOptions_Elemental_ABt(C);
360:     break;
361:   default:
362:     break;
363:   }
364:   return 0;
365: }
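
/*
   These hooks are reached through the generic MatProduct interface; a hedged usage sketch,
   with A and B existing MATELEMENTAL matrices:

     Mat C;
     MatProductCreate(A,B,NULL,&C);
     MatProductSetType(C,MATPRODUCT_AB);   // or MATPRODUCT_ABt
     MatProductSetFromOptions(C);          // dispatches to MatProductSetFromOptions_Elemental()
     MatProductSymbolic(C);
     MatProductNumeric(C);

   MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C) wraps the same sequence.
*/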

367: PetscErrorCode MatMatMultNumeric_Elemental_MPIDense(Mat A,Mat B,Mat C)
368: {
369:   Mat            Be,Ce;

371:   MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&Be);
372:   MatMatMult(A,Be,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Ce);
373:   MatConvert(Ce,MATMPIDENSE,MAT_REUSE_MATRIX,&C);
374:   MatDestroy(&Be);
375:   MatDestroy(&Ce);
376:   return 0;
377: }

379: PetscErrorCode MatMatMultSymbolic_Elemental_MPIDense(Mat A,Mat B,PetscReal fill,Mat C)
380: {
381:   MatSetSizes(C,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
382:   MatSetType(C,MATMPIDENSE);
383:   MatSetUp(C);
384:   C->ops->matmultnumeric = MatMatMultNumeric_Elemental_MPIDense;
385:   return 0;
386: }

388: PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense_AB(Mat C)
389: {
390:   C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental_MPIDense;
391:   C->ops->productsymbolic = MatProductSymbolic_AB;
392:   return 0;
393: }

395: PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense(Mat C)
396: {
397:   Mat_Product    *product = C->product;

399:   if (product->type == MATPRODUCT_AB) {
400:     MatProductSetFromOptions_Elemental_MPIDense_AB(C);
401:   }
402:   return 0;
403: }
404: /* --------------------------------------- */

406: static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
407: {
408:   PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
409:   Mat_Elemental   *a = (Mat_Elemental*)A->data;
410:   PetscElemScalar v;
411:   MPI_Comm        comm;

413:   PetscObjectGetComm((PetscObject)A,&comm);
414:   MatGetSize(A,&nrows,&ncols);
415:   nD = nrows>ncols ? ncols : nrows;
416:   for (i=0; i<nD; i++) {
417:     PetscInt erow,ecol;
418:     P2RO(A,0,i,&rrank,&ridx);
419:     RO2E(A,0,rrank,ridx,&erow);
421:     P2RO(A,1,i,&crank,&cidx);
422:     RO2E(A,1,crank,cidx,&ecol);
424:     v = a->emat->Get(erow,ecol);
425:     VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
426:   }
427:   VecAssemblyBegin(D);
428:   VecAssemblyEnd(D);
429:   return 0;
430: }

432: static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
433: {
434:   Mat_Elemental         *x = (Mat_Elemental*)X->data;
435:   const PetscElemScalar *d;

437:   if (R) {
438:     VecGetArrayRead(R,(const PetscScalar **)&d);
439:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
440:     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
441:     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
442:     VecRestoreArrayRead(R,(const PetscScalar **)&d);
443:   }
444:   if (L) {
445:     VecGetArrayRead(L,(const PetscScalar **)&d);
446:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
447:     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
448:     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
449:     VecRestoreArrayRead(L,(const PetscScalar **)&d);
450:   }
451:   return 0;
452: }

454: static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
455: {
456:   *missing = PETSC_FALSE;
457:   return 0;
458: }

460: static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
461: {
462:   Mat_Elemental  *x = (Mat_Elemental*)X->data;

464:   El::Scale((PetscElemScalar)a,*x->emat);
465:   return 0;
466: }

468: /*
469:   MatAXPY - Computes Y = a*X + Y.
470: */
471: static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
472: {
473:   Mat_Elemental  *x = (Mat_Elemental*)X->data;
474:   Mat_Elemental  *y = (Mat_Elemental*)Y->data;

476:   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
477:   PetscObjectStateIncrease((PetscObject)Y);
478:   return 0;
479: }

481: static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
482: {
483:   Mat_Elemental *a=(Mat_Elemental*)A->data;
484:   Mat_Elemental *b=(Mat_Elemental*)B->data;

486:   El::Copy(*a->emat,*b->emat);
487:   PetscObjectStateIncrease((PetscObject)B);
488:   return 0;
489: }

491: static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
492: {
493:   Mat            Be;
494:   MPI_Comm       comm;
495:   Mat_Elemental  *a=(Mat_Elemental*)A->data;

497:   PetscObjectGetComm((PetscObject)A,&comm);
498:   MatCreate(comm,&Be);
499:   MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
500:   MatSetType(Be,MATELEMENTAL);
501:   MatSetUp(Be);
502:   *B = Be;
503:   if (op == MAT_COPY_VALUES) {
504:     Mat_Elemental *b=(Mat_Elemental*)Be->data;
505:     El::Copy(*a->emat,*b->emat);
506:   }
507:   Be->assembled = PETSC_TRUE;
508:   return 0;
509: }

511: static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
512: {
513:   Mat            Be = *B;
514:   MPI_Comm       comm;
515:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

517:   PetscObjectGetComm((PetscObject)A,&comm);
 518:   /* Only out-of-place transpose is supported */
520:   if (reuse == MAT_INITIAL_MATRIX) {
521:     MatCreate(comm,&Be);
522:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
523:     MatSetType(Be,MATELEMENTAL);
524:     MatSetUp(Be);
525:     *B = Be;
526:   }
527:   b = (Mat_Elemental*)Be->data;
528:   El::Transpose(*a->emat,*b->emat);
529:   Be->assembled = PETSC_TRUE;
530:   return 0;
531: }

533: static PetscErrorCode MatConjugate_Elemental(Mat A)
534: {
535:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

537:   El::Conjugate(*a->emat);
538:   return 0;
539: }

541: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
542: {
543:   Mat            Be = *B;
544:   MPI_Comm       comm;
545:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

547:   PetscObjectGetComm((PetscObject)A,&comm);
 548:   /* Only out-of-place Hermitian transpose is supported */
549:   if (reuse == MAT_INITIAL_MATRIX) {
550:     MatCreate(comm,&Be);
551:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
552:     MatSetType(Be,MATELEMENTAL);
553:     MatSetUp(Be);
554:     *B = Be;
555:   }
556:   b = (Mat_Elemental*)Be->data;
557:   El::Adjoint(*a->emat,*b->emat);
558:   Be->assembled = PETSC_TRUE;
559:   return 0;
560: }

562: static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
563: {
564:   Mat_Elemental     *a = (Mat_Elemental*)A->data;
565:   PetscElemScalar   *x;
566:   PetscInt          pivoting = a->pivoting;

568:   VecCopy(B,X);
569:   VecGetArray(X,(PetscScalar **)&x);

571:   El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
572:   xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
573:   El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
574:   switch (A->factortype) {
575:   case MAT_FACTOR_LU:
576:     if (pivoting == 0) {
577:       El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
578:     } else if (pivoting == 1) {
579:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
580:     } else { /* pivoting == 2 */
581:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
582:     }
583:     break;
584:   case MAT_FACTOR_CHOLESKY:
585:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
586:     break;
587:   default:
 588:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored matrix or unsupported MatFactorType");
589:     break;
590:   }
591:   El::Copy(xer,xe);

593:   VecRestoreArray(X,(PetscScalar **)&x);
594:   return 0;
595: }

597: static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
598: {
599:   MatSolve_Elemental(A,B,X);
600:   VecAXPY(X,1,Y);
601:   return 0;
602: }

604: static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
605: {
606:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
607:   Mat_Elemental  *x;
608:   Mat            C;
609:   PetscInt       pivoting = a->pivoting;
610:   PetscBool      flg;
611:   MatType        type;

613:   MatGetType(X,&type);
614:   PetscStrcmp(type,MATELEMENTAL,&flg);
615:   if (!flg) {
616:     MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&C);
617:     x = (Mat_Elemental*)C->data;
618:   } else {
619:     x = (Mat_Elemental*)X->data;
620:     El::Copy(*((Mat_Elemental*)B->data)->emat,*x->emat);
621:   }
622:   switch (A->factortype) {
623:   case MAT_FACTOR_LU:
624:     if (pivoting == 0) {
625:       El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
626:     } else if (pivoting == 1) {
627:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
628:     } else {
629:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
630:     }
631:     break;
632:   case MAT_FACTOR_CHOLESKY:
633:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
634:     break;
635:   default:
 636:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored matrix or unsupported MatFactorType");
637:     break;
638:   }
639:   if (!flg) {
640:     MatConvert(C,type,MAT_REUSE_MATRIX,&X);
641:     MatDestroy(&C);
642:   }
643:   return 0;
644: }

646: static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
647: {
648:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
649:   PetscInt       pivoting = a->pivoting;

651:   if (pivoting == 0) {
652:     El::LU(*a->emat);
653:   } else if (pivoting == 1) {
654:     El::LU(*a->emat,*a->P);
655:   } else {
656:     El::LU(*a->emat,*a->P,*a->Q);
657:   }
658:   A->factortype = MAT_FACTOR_LU;
659:   A->assembled  = PETSC_TRUE;

661:   PetscFree(A->solvertype);
662:   PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
663:   return 0;
664: }

666: static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
667: {
668:   MatCopy(A,F,SAME_NONZERO_PATTERN);
669:   MatLUFactor_Elemental(F,0,0,info);
670:   return 0;
671: }

673: static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
674: {
675:   /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
676:   return 0;
677: }

679: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
680: {
681:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
682:   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;

684:   El::Cholesky(El::UPPER,*a->emat);
685:   A->factortype = MAT_FACTOR_CHOLESKY;
686:   A->assembled  = PETSC_TRUE;

688:   PetscFree(A->solvertype);
689:   PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
690:   return 0;
691: }

693: static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
694: {
695:   MatCopy(A,F,SAME_NONZERO_PATTERN);
696:   MatCholeskyFactor_Elemental(F,0,info);
697:   return 0;
698: }

700: static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
701: {
702:   /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
703:   return 0;
704: }

706: PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
707: {
708:   *type = MATSOLVERELEMENTAL;
709:   return 0;
710: }

712: static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
713: {
714:   Mat            B;

716:   /* Create the factorization matrix */
717:   MatCreate(PetscObjectComm((PetscObject)A),&B);
718:   MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
719:   MatSetType(B,MATELEMENTAL);
720:   MatSetUp(B);
721:   B->factortype = ftype;
722:   B->trivialsymbolic = PETSC_TRUE;
723:   PetscFree(B->solvertype);
724:   PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

726:   PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
727:   *F            = B;
728:   return 0;
729: }

731: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
732: {
 733:   MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
 734:   MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
735:   return 0;
736: }
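
/*
   A hedged sketch of direct-solver usage with this package; A is a MATELEMENTAL matrix and
   b, x are compatible Vecs:

     Mat           F;
     MatFactorInfo info;
     MatFactorInfoInitialize(&info);
     MatGetFactor(A,MATSOLVERELEMENTAL,MAT_FACTOR_LU,&F);
     MatLUFactorSymbolic(F,A,NULL,NULL,&info);  // a no-op here; F was allocated by MatGetFactor()
     MatLUFactorNumeric(F,A,&info);             // copies A into F and runs El::LU()
     MatSolve(F,b,x);

   From the options database the equivalent is -pc_type lu -pc_factor_mat_solver_type elemental.
*/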

738: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
739: {
740:   Mat_Elemental *a=(Mat_Elemental*)A->data;

742:   switch (type) {
743:   case NORM_1:
744:     *nrm = El::OneNorm(*a->emat);
745:     break;
746:   case NORM_FROBENIUS:
747:     *nrm = El::FrobeniusNorm(*a->emat);
748:     break;
749:   case NORM_INFINITY:
750:     *nrm = El::InfinityNorm(*a->emat);
751:     break;
752:   default:
753:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
754:   }
755:   return 0;
756: }

758: static PetscErrorCode MatZeroEntries_Elemental(Mat A)
759: {
760:   Mat_Elemental *a=(Mat_Elemental*)A->data;

762:   El::Zero(*a->emat);
763:   return 0;
764: }

766: static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
767: {
768:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
769:   PetscInt       i,m,shift,stride,*idx;

771:   if (rows) {
772:     m = a->emat->LocalHeight();
773:     shift = a->emat->ColShift();
774:     stride = a->emat->ColStride();
775:     PetscMalloc1(m,&idx);
776:     for (i=0; i<m; i++) {
777:       PetscInt rank,offset;
778:       E2RO(A,0,shift+i*stride,&rank,&offset);
779:       RO2P(A,0,rank,offset,&idx[i]);
780:     }
781:     ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
782:   }
783:   if (cols) {
784:     m = a->emat->LocalWidth();
785:     shift = a->emat->RowShift();
786:     stride = a->emat->RowStride();
787:     PetscMalloc1(m,&idx);
788:     for (i=0; i<m; i++) {
789:       PetscInt rank,offset;
790:       E2RO(A,1,shift+i*stride,&rank,&offset);
791:       RO2P(A,1,rank,offset,&idx[i]);
792:     }
793:     ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
794:   }
795:   return 0;
796: }
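
/*
   A short usage sketch for the routine above, reached through MatGetOwnershipIS() (composed
   on the object in MatCreate_Elemental()); the index sets report which global rows/columns
   this process owns in the element-cyclic layout:

     IS rows,cols;
     MatGetOwnershipIS(A,&rows,&cols);
     ... use the index sets ...
     ISDestroy(&rows);
     ISDestroy(&cols);
*/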

798: static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
799: {
800:   Mat                Bmpi;
801:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
802:   MPI_Comm           comm;
803:   IS                 isrows,iscols;
804:   PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
805:   const PetscInt     *rows,*cols;
806:   PetscElemScalar    v;
807:   const El::Grid     &grid = a->emat->Grid();

809:   PetscObjectGetComm((PetscObject)A,&comm);

811:   if (reuse == MAT_REUSE_MATRIX) {
812:     Bmpi = *B;
813:   } else {
814:     MatCreate(comm,&Bmpi);
815:     MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
816:     MatSetType(Bmpi,MATDENSE);
817:     MatSetUp(Bmpi);
818:   }

820:   /* Get local entries of A */
821:   MatGetOwnershipIS(A,&isrows,&iscols);
822:   ISGetLocalSize(isrows,&nrows);
823:   ISGetIndices(isrows,&rows);
824:   ISGetLocalSize(iscols,&ncols);
825:   ISGetIndices(iscols,&cols);

827:   if (a->roworiented) {
828:     for (i=0; i<nrows; i++) {
829:       P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
830:       RO2E(A,0,rrank,ridx,&erow);
832:       for (j=0; j<ncols; j++) {
833:         P2RO(A,1,cols[j],&crank,&cidx);
834:         RO2E(A,1,crank,cidx,&ecol);

837:         elrow = erow / grid.MCSize(); /* Elemental local row index */
838:         elcol = ecol / grid.MRSize(); /* Elemental local column index */
839:         v = a->emat->GetLocal(elrow,elcol);
840:         MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
841:       }
842:     }
843:   } else { /* column-oriented */
844:     for (j=0; j<ncols; j++) {
845:       P2RO(A,1,cols[j],&crank,&cidx);
846:       RO2E(A,1,crank,cidx,&ecol);
848:       for (i=0; i<nrows; i++) {
849:         P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
850:         RO2E(A,0,rrank,ridx,&erow);

853:         elrow = erow / grid.MCSize(); /* Elemental local row index */
854:         elcol = ecol / grid.MRSize(); /* Elemental local column index */
855:         v = a->emat->GetLocal(elrow,elcol);
856:         MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
857:       }
858:     }
859:   }
860:   MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
861:   MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
862:   if (reuse == MAT_INPLACE_MATRIX) {
863:     MatHeaderReplace(A,&Bmpi);
864:   } else {
865:     *B = Bmpi;
866:   }
867:   ISDestroy(&isrows);
868:   ISDestroy(&iscols);
869:   return 0;
870: }

872: PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
873: {
874:   Mat               mat_elemental;
875:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
876:   const PetscInt    *cols;
877:   const PetscScalar *vals;

879:   if (reuse == MAT_REUSE_MATRIX) {
880:     mat_elemental = *newmat;
881:     MatZeroEntries(mat_elemental);
882:   } else {
883:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
884:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
885:     MatSetType(mat_elemental,MATELEMENTAL);
886:     MatSetUp(mat_elemental);
887:   }
888:   for (row=0; row<M; row++) {
889:     MatGetRow(A,row,&ncols,&cols,&vals);
890:     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
891:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
892:     MatRestoreRow(A,row,&ncols,&cols,&vals);
893:   }
894:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
895:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

897:   if (reuse == MAT_INPLACE_MATRIX) {
898:     MatHeaderReplace(A,&mat_elemental);
899:   } else {
900:     *newmat = mat_elemental;
901:   }
902:   return 0;
903: }
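
/*
   A hedged usage sketch for the AIJ/SBAIJ converters in this file: any such matrix A can be
   redistributed to Elemental form with

     Mat Ae;
     MatConvert(A,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae);

   or converted in place with MatConvert(A,MATELEMENTAL,MAT_INPLACE_MATRIX,&A).
*/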

905: PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
906: {
907:   Mat               mat_elemental;
908:   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
909:   const PetscInt    *cols;
910:   const PetscScalar *vals;

912:   if (reuse == MAT_REUSE_MATRIX) {
913:     mat_elemental = *newmat;
914:     MatZeroEntries(mat_elemental);
915:   } else {
916:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
917:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
918:     MatSetType(mat_elemental,MATELEMENTAL);
919:     MatSetUp(mat_elemental);
920:   }
921:   for (row=rstart; row<rend; row++) {
922:     MatGetRow(A,row,&ncols,&cols,&vals);
923:     for (j=0; j<ncols; j++) {
924:       /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
925:       MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
926:     }
927:     MatRestoreRow(A,row,&ncols,&cols,&vals);
928:   }
929:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
930:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

932:   if (reuse == MAT_INPLACE_MATRIX) {
933:     MatHeaderReplace(A,&mat_elemental);
934:   } else {
935:     *newmat = mat_elemental;
936:   }
937:   return 0;
938: }

940: PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
941: {
942:   Mat               mat_elemental;
943:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
944:   const PetscInt    *cols;
945:   const PetscScalar *vals;

947:   if (reuse == MAT_REUSE_MATRIX) {
948:     mat_elemental = *newmat;
949:     MatZeroEntries(mat_elemental);
950:   } else {
951:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
952:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
953:     MatSetType(mat_elemental,MATELEMENTAL);
954:     MatSetUp(mat_elemental);
955:   }
956:   MatGetRowUpperTriangular(A);
957:   for (row=0; row<M; row++) {
958:     MatGetRow(A,row,&ncols,&cols,&vals);
959:     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
960:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
961:     for (j=0; j<ncols; j++) { /* lower triangular part */
962:       PetscScalar v;
963:       if (cols[j] == row) continue;
964:       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
965:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
966:     }
967:     MatRestoreRow(A,row,&ncols,&cols,&vals);
968:   }
969:   MatRestoreRowUpperTriangular(A);
970:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
971:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

973:   if (reuse == MAT_INPLACE_MATRIX) {
974:     MatHeaderReplace(A,&mat_elemental);
975:   } else {
976:     *newmat = mat_elemental;
977:   }
978:   return 0;
979: }

981: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
982: {
983:   Mat               mat_elemental;
984:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
985:   const PetscInt    *cols;
986:   const PetscScalar *vals;

988:   if (reuse == MAT_REUSE_MATRIX) {
989:     mat_elemental = *newmat;
990:     MatZeroEntries(mat_elemental);
991:   } else {
992:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
993:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
994:     MatSetType(mat_elemental,MATELEMENTAL);
995:     MatSetUp(mat_elemental);
996:   }
997:   MatGetRowUpperTriangular(A);
998:   for (row=rstart; row<rend; row++) {
999:     MatGetRow(A,row,&ncols,&cols,&vals);
1000:     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1001:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1002:     for (j=0; j<ncols; j++) { /* lower triangular part */
1003:       PetscScalar v;
1004:       if (cols[j] == row) continue;
1005:       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
1006:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
1007:     }
1008:     MatRestoreRow(A,row,&ncols,&cols,&vals);
1009:   }
1010:   MatRestoreRowUpperTriangular(A);
1011:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1012:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

1014:   if (reuse == MAT_INPLACE_MATRIX) {
1015:     MatHeaderReplace(A,&mat_elemental);
1016:   } else {
1017:     *newmat = mat_elemental;
1018:   }
1019:   return 0;
1020: }

1022: static PetscErrorCode MatDestroy_Elemental(Mat A)
1023: {
1024:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1025:   Mat_Elemental_Grid *commgrid;
1026:   PetscBool          flg;
1027:   MPI_Comm           icomm;

1029:   delete a->emat;
1030:   delete a->P;
1031:   delete a->Q;

1033:   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1034:   PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1035:   MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1036:   if (--commgrid->grid_refct == 0) {
1037:     delete commgrid->grid;
1038:     PetscFree(commgrid);
1039:     MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
1040:   }
1041:   PetscCommDestroy(&icomm);
1042:   PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
1043:   PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
1044:   PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",NULL);
1045:   PetscFree(A->data);
1046:   return 0;
1047: }

1049: PetscErrorCode MatSetUp_Elemental(Mat A)
1050: {
1051:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1052:   MPI_Comm       comm;
1053:   PetscMPIInt    rsize,csize;
1054:   PetscInt       n;

1056:   PetscLayoutSetUp(A->rmap);
1057:   PetscLayoutSetUp(A->cmap);

 1059:   /* Check whether the local row and column sizes are equally distributed.
 1060:      Jed: Elemental uses "element" cyclic ordering, so the sizes need to match that
 1061:      exactly. The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like what would be returned by
 1062:      PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
1063:   PetscObjectGetComm((PetscObject)A,&comm);
1064:   n = PETSC_DECIDE;
1065:   PetscSplitOwnership(comm,&n,&A->rmap->N);

1068:   n = PETSC_DECIDE;
1069:   PetscSplitOwnership(comm,&n,&A->cmap->N);

1072:   a->emat->Resize(A->rmap->N,A->cmap->N);
1073:   El::Zero(*a->emat);

1075:   MPI_Comm_size(A->rmap->comm,&rsize);
1076:   MPI_Comm_size(A->cmap->comm,&csize);
1078:   a->commsize = rsize;
1079:   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1080:   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1081:   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1082:   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1083:   return 0;
1084: }
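
/*
   A small worked example of the splitting above (hedged): for A->rmap->N = 10 rows on
   rsize = 3 processes, PetscSplitOwnership() yields local sizes 4,3,3, and the code above
   computes a->mr[0] = 10 % 3 = 1 (the number of ranks holding the larger block) and
   a->m[0] = 10/3 + 1 = 4 (the larger local block size).
*/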

1086: PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1087: {
1088:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

1090:   /* printf("Calling ProcessQueues\n"); */
1091:   a->emat->ProcessQueues();
1092:   /* printf("Finished ProcessQueues\n"); */
1093:   return 0;
1094: }

1096: PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
1097: {
1098:   /* Currently does nothing */
1099:   return 0;
1100: }

1102: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1103: {
1104:   Mat            Adense,Ae;
1105:   MPI_Comm       comm;

1107:   PetscObjectGetComm((PetscObject)newMat,&comm);
1108:   MatCreate(comm,&Adense);
1109:   MatSetType(Adense,MATDENSE);
1110:   MatLoad(Adense,viewer);
1111:   MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1112:   MatDestroy(&Adense);
1113:   MatHeaderReplace(newMat,&Ae);
1114:   return 0;
1115: }
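
/*
   A hedged sketch of loading a MATELEMENTAL matrix from a PETSc binary file; "matrix.dat"
   is a placeholder file name:

     PetscViewer viewer;
     Mat         A;
     PetscViewerBinaryOpen(comm,"matrix.dat",FILE_MODE_READ,&viewer);
     MatCreate(comm,&A);
     MatSetType(A,MATELEMENTAL);
     MatLoad(A,viewer);          // loads as MATDENSE, then converts as above
     PetscViewerDestroy(&viewer);
*/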

1117: /* -------------------------------------------------------------------*/
1118: static struct _MatOps MatOps_Values = {
1119:        MatSetValues_Elemental,
1120:        0,
1121:        0,
1122:        MatMult_Elemental,
1123: /* 4*/ MatMultAdd_Elemental,
1124:        MatMultTranspose_Elemental,
1125:        MatMultTransposeAdd_Elemental,
1126:        MatSolve_Elemental,
1127:        MatSolveAdd_Elemental,
1128:        0,
1129: /*10*/ 0,
1130:        MatLUFactor_Elemental,
1131:        MatCholeskyFactor_Elemental,
1132:        0,
1133:        MatTranspose_Elemental,
1134: /*15*/ MatGetInfo_Elemental,
1135:        0,
1136:        MatGetDiagonal_Elemental,
1137:        MatDiagonalScale_Elemental,
1138:        MatNorm_Elemental,
1139: /*20*/ MatAssemblyBegin_Elemental,
1140:        MatAssemblyEnd_Elemental,
1141:        MatSetOption_Elemental,
1142:        MatZeroEntries_Elemental,
1143: /*24*/ 0,
1144:        MatLUFactorSymbolic_Elemental,
1145:        MatLUFactorNumeric_Elemental,
1146:        MatCholeskyFactorSymbolic_Elemental,
1147:        MatCholeskyFactorNumeric_Elemental,
1148: /*29*/ MatSetUp_Elemental,
1149:        0,
1150:        0,
1151:        0,
1152:        0,
1153: /*34*/ MatDuplicate_Elemental,
1154:        0,
1155:        0,
1156:        0,
1157:        0,
1158: /*39*/ MatAXPY_Elemental,
1159:        0,
1160:        0,
1161:        0,
1162:        MatCopy_Elemental,
1163: /*44*/ 0,
1164:        MatScale_Elemental,
1165:        MatShift_Basic,
1166:        0,
1167:        0,
1168: /*49*/ 0,
1169:        0,
1170:        0,
1171:        0,
1172:        0,
1173: /*54*/ 0,
1174:        0,
1175:        0,
1176:        0,
1177:        0,
1178: /*59*/ 0,
1179:        MatDestroy_Elemental,
1180:        MatView_Elemental,
1181:        0,
1182:        0,
1183: /*64*/ 0,
1184:        0,
1185:        0,
1186:        0,
1187:        0,
1188: /*69*/ 0,
1189:        0,
1190:        MatConvert_Elemental_Dense,
1191:        0,
1192:        0,
1193: /*74*/ 0,
1194:        0,
1195:        0,
1196:        0,
1197:        0,
1198: /*79*/ 0,
1199:        0,
1200:        0,
1201:        0,
1202:        MatLoad_Elemental,
1203: /*84*/ 0,
1204:        0,
1205:        0,
1206:        0,
1207:        0,
1208: /*89*/ 0,
1209:        0,
1210:        MatMatMultNumeric_Elemental,
1211:        0,
1212:        0,
1213: /*94*/ 0,
1214:        0,
1215:        0,
1216:        MatMatTransposeMultNumeric_Elemental,
1217:        0,
1218: /*99*/ MatProductSetFromOptions_Elemental,
1219:        0,
1220:        0,
1221:        MatConjugate_Elemental,
1222:        0,
1223: /*104*/0,
1224:        0,
1225:        0,
1226:        0,
1227:        0,
1228: /*109*/MatMatSolve_Elemental,
1229:        0,
1230:        0,
1231:        0,
1232:        MatMissingDiagonal_Elemental,
1233: /*114*/0,
1234:        0,
1235:        0,
1236:        0,
1237:        0,
1238: /*119*/0,
1239:        MatHermitianTranspose_Elemental,
1240:        0,
1241:        0,
1242:        0,
1243: /*124*/0,
1244:        0,
1245:        0,
1246:        0,
1247:        0,
1248: /*129*/0,
1249:        0,
1250:        0,
1251:        0,
1252:        0,
1253: /*134*/0,
1254:        0,
1255:        0,
1256:        0,
1257:        0,
1258:        0,
1259: /*140*/0,
1260:        0,
1261:        0,
1262:        0,
1263:        0,
1264: /*145*/0,
1265:        0,
1266:        0
1267: };

1269: /*MC
1270:    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

 1272:   Use ./configure --download-elemental to build PETSc with Elemental support

 1274:   Use -pc_type lu -pc_factor_mat_solver_type elemental to use Elemental as the direct solver

1276:    Options Database Keys:
1277: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
 1278: - -mat_elemental_grid_height <height> - sets the grid height for the 2D cyclic ordering of the internal matrix

1280:   Level: beginner

1282: .seealso: MATDENSE
1283: M*/
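
/*
   A minimal creation sketch for this type (hedged; error checking omitted):

     Mat A;
     MatCreate(comm,&A);
     MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);
     MatSetType(A,MATELEMENTAL);
     MatSetFromOptions(A);   // picks up -mat_elemental_grid_height etc.
     MatSetUp(A);
*/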

1285: PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1286: {
1287:   Mat_Elemental      *a;
1288:   PetscBool          flg,flg1;
1289:   Mat_Elemental_Grid *commgrid;
1290:   MPI_Comm           icomm;
1291:   PetscInt           optv1;

1293:   PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
1294:   A->insertmode = NOT_SET_VALUES;

1296:   PetscNewLog(A,&a);
1297:   A->data = (void*)a;

1299:   /* Set up the elemental matrix */
1300:   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

1302:   /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1303:   if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1304:     MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
1305:     PetscCitationsRegister(ElementalCitation,&ElementalCite);
1306:   }
1307:   PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1308:   MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1309:   if (!flg) {

1312:     PetscNewLog(A,&commgrid);

1314:     PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
 1315:     /* the default grid sizes (CommSize,1) displayed by -help are placeholders; the actual dimensions are chosen when El::Grid() is constructed */
1316:     PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
1317:     if (flg1) {
1319:       commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1320:     } else {
1321:       commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1322:       /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
1323:     }
1324:     commgrid->grid_refct = 1;
1325:     MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

1327:     a->pivoting    = 1;
1328:     PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

1330:     PetscOptionsEnd();
1331:   } else {
1332:     commgrid->grid_refct++;
1333:   }
1334:   PetscCommDestroy(&icomm);
1335:   a->grid        = commgrid->grid;
1336:   a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
1337:   a->roworiented = PETSC_TRUE;

1339:   PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
1340:   PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",MatProductSetFromOptions_Elemental_MPIDense);
1341:   PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
1342:   return 0;
1343: }