/* matelem.cxx: PETSc MATELEMENTAL matrix type backed by the Elemental distributed dense linear algebra library */

  1: #include <petsc/private/petscelemental.h>

/* BibTeX entry for the Elemental library, for PETSc's citation mechanism */
const char ElementalCitation[] = "@Article{Elemental2012,\n"
"  author  = {Jack Poulson and Bryan Marker and Jeff R. Hammond and Nichols A. Romero and Robert {v}an~{d}e~{G}eijn},\n"
"  title   = {Elemental: A New Framework for Distributed Memory Dense Matrix Computations},\n"
"  journal = {{ACM} Transactions on Mathematical Software},\n"
"  volume  = {39},\n"
"  number  = {2},\n"
"  year    = {2013}\n"
"}\n";
/* presumably flipped to PETSC_TRUE after the citation is registered once — not used in this chunk; confirm against the rest of the file */
static PetscBool ElementalCite = PETSC_FALSE;

/*
    The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
  is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
*/
static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;

 19: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
 20: {
 22:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
 23:   PetscBool      iascii;

 26:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 27:   if (iascii) {
 28:     PetscViewerFormat format;
 29:     PetscViewerGetFormat(viewer,&format);
 30:     if (format == PETSC_VIEWER_ASCII_INFO) {
 31:       /* call elemental viewing function */
 32:       PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
 33:       PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());
 34:       PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
 35:       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
 36:         /* call elemental viewing function */
 37:         PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
 38:       }

 40:     } else if (format == PETSC_VIEWER_DEFAULT) {
 41:       PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
 42:       El::Print( *a->emat, "Elemental matrix (cyclic ordering)");
 43:       PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
 44:       if (A->factortype == MAT_FACTOR_NONE) {
 45:         Mat Adense;
 46:         MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 47:         MatView(Adense,viewer);
 48:         MatDestroy(&Adense);
 49:       }
 50:     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
 51:   } else {
 52:     /* convert to dense format and call MatView() */
 53:     Mat Adense;
 54:     MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 55:     MatView(Adense,viewer);
 56:     MatDestroy(&Adense);
 57:   }
 58:   return(0);
 59: }

/*
   MatGetInfo_Elemental - Reports storage statistics for the Elemental matrix.

   Note: even for the GLOBAL flags, only the locally allocated entry count is
   reported — no reduction across ranks is performed (see commented-out code).
*/
static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  info->block_size = 1.0;

  if (flag == MAT_LOCAL) {
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    /* NOTE(review): MAT_GLOBAL_MAX is currently a no-op; nz fields are left unset */
    //MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));
    /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
  } else if (flag == MAT_GLOBAL_SUM) {
    /* NOTE(review): only the local allocation is reported; the commented-out
       Allreduce below would be needed for a true global sum */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
    //MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
    //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
  }

  info->nz_unneeded       = 0.0;
  info->assemblies        = A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

 93: PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
 94: {
 95:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

 98:   switch (op) {
 99:   case MAT_NEW_NONZERO_LOCATIONS:
100:   case MAT_NEW_NONZERO_LOCATION_ERR:
101:   case MAT_NEW_NONZERO_ALLOCATION_ERR:
102:   case MAT_SYMMETRIC:
103:   case MAT_SORTED_FULL:
104:   case MAT_HERMITIAN:
105:     break;
106:   case MAT_ROW_ORIENTED:
107:     a->roworiented = flg;
108:     break;
109:   default:
110:     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
111:   }
112:   return(0);
113: }

/*
   MatSetValues_Elemental - Inserts/adds a logically dense sub-block of values.

   Two passes over the (rows x cols) block are made:
     pass 1: locally owned entries are Set/Update'd immediately, and the number
             of off-process entries is counted;
     pass 2: exactly that many off-process updates are queued (after Reserve),
             to be exchanged at assembly time.
   Off-process entries only support ADD_VALUES because the exchange is an axpy.
   Negative row/column indices are skipped, following the PETSc convention.
   The roworiented flag selects whether vals is read row-major or column-major.
*/
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues);
    /* second pass: queue the off-process entries counted above */
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j]);
        }
      }
    }
  } else { /* columnoriented */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues);
    /* second pass: queue the off-process entries counted above */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr]);
        }
      }
    }
  }
  return(0);
}

213: static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
214: {
215:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
216:   PetscErrorCode        ierr;
217:   const PetscElemScalar *x;
218:   PetscElemScalar       *y;
219:   PetscElemScalar       one = 1,zero = 0;

222:   VecGetArrayRead(X,(const PetscScalar **)&x);
223:   VecGetArray(Y,(PetscScalar **)&y);
224:   { /* Scoping so that constructor is called before pointer is returned */
225:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
226:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
227:     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
228:     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
229:   }
230:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
231:   VecRestoreArray(Y,(PetscScalar **)&y);
232:   return(0);
233: }

235: static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
236: {
237:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
238:   PetscErrorCode        ierr;
239:   const PetscElemScalar *x;
240:   PetscElemScalar       *y;
241:   PetscElemScalar       one = 1,zero = 0;

244:   VecGetArrayRead(X,(const PetscScalar **)&x);
245:   VecGetArray(Y,(PetscScalar **)&y);
246:   { /* Scoping so that constructor is called before pointer is returned */
247:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
248:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
249:     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
250:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
251:   }
252:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
253:   VecRestoreArray(Y,(PetscScalar **)&y);
254:   return(0);
255: }

257: static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
258: {
259:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
260:   PetscErrorCode        ierr;
261:   const PetscElemScalar *x;
262:   PetscElemScalar       *z;
263:   PetscElemScalar       one = 1;

266:   if (Y != Z) {VecCopy(Y,Z);}
267:   VecGetArrayRead(X,(const PetscScalar **)&x);
268:   VecGetArray(Z,(PetscScalar **)&z);
269:   { /* Scoping so that constructor is called before pointer is returned */
270:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
271:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
272:     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
273:     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
274:   }
275:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
276:   VecRestoreArray(Z,(PetscScalar **)&z);
277:   return(0);
278: }

280: static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
281: {
282:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
283:   PetscErrorCode        ierr;
284:   const PetscElemScalar *x;
285:   PetscElemScalar       *z;
286:   PetscElemScalar       one = 1;

289:   if (Y != Z) {VecCopy(Y,Z);}
290:   VecGetArrayRead(X,(const PetscScalar **)&x);
291:   VecGetArray(Z,(PetscScalar **)&z);
292:   { /* Scoping so that constructor is called before pointer is returned */
293:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
294:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
295:     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
296:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
297:   }
298:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
299:   VecRestoreArray(Z,(PetscScalar **)&z);
300:   return(0);
301: }

303: PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
304: {
305:   Mat_Elemental    *a = (Mat_Elemental*)A->data;
306:   Mat_Elemental    *b = (Mat_Elemental*)B->data;
307:   Mat_Elemental    *c = (Mat_Elemental*)C->data;
308:   PetscElemScalar  one = 1,zero = 0;

311:   { /* Scoping so that constructor is called before pointer is returned */
312:     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
313:   }
314:   C->assembled = PETSC_TRUE;
315:   return(0);
316: }

/*
   Symbolic phase of C = A*B: size and type the product matrix and install the
   numeric routine.  The fill argument is ignored (dense storage).
*/
PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
{

  MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
  return(0);
}

330: static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
331: {
332:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
333:   Mat_Elemental      *b = (Mat_Elemental*)B->data;
334:   Mat_Elemental      *c = (Mat_Elemental*)C->data;
335:   PetscElemScalar    one = 1,zero = 0;

338:   { /* Scoping so that constructor is called before pointer is returned */
339:     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
340:   }
341:   C->assembled = PETSC_TRUE;
342:   return(0);
343: }

/*
   Symbolic phase of C = A*B^T: size and type the product matrix; the result is
   (A->rows) x (B->rows).  The fill argument is ignored (dense storage).
*/
static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
{

  MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(C,MATELEMENTAL);
  MatSetUp(C);
  return(0);
}

356: /* --------------------------------------- */
/* --------------------------------------- */
/* Install the symbolic callbacks for the C = A*B product type on C */
static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
{
  C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
  C->ops->productsymbolic = MatProductSymbolic_AB;
  return(0);
}

/* Install the symbolic callbacks for the C = A*B^T product type on C */
static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
{
  C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
  C->ops->productsymbolic          = MatProductSymbolic_ABt;
  return(0);
}

373: PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
374: {
376:   Mat_Product    *product = C->product;

379:   switch (product->type) {
380:   case MATPRODUCT_AB:
381:     MatProductSetFromOptions_Elemental_AB(C);
382:     break;
383:   case MATPRODUCT_ABt:
384:     MatProductSetFromOptions_Elemental_ABt(C);
385:     break;
386:   default:
387:     break;
388:   }
389:   return(0);
390: }

/*
   Numeric phase of C = A*B with an MPIDENSE B: convert B to Elemental,
   multiply there, then convert the product back into the preallocated C.
*/
PetscErrorCode MatMatMultNumeric_Elemental_MPIDense(Mat A,Mat B,Mat C)
{
  Mat            Be,Ce;

  MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&Be);
  MatMatMult(A,Be,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Ce);
  /* reuse the existing C (created in the symbolic phase) as the destination */
  MatConvert(Ce,MATMPIDENSE,MAT_REUSE_MATRIX,&C);
  MatDestroy(&Be);
  MatDestroy(&Ce);
  return(0);
}

/*
   Symbolic phase of C = A*B with an MPIDENSE B: size and type C and install
   the numeric routine.  The fill argument is ignored (dense storage).
*/
PetscErrorCode MatMatMultSymbolic_Elemental_MPIDense(Mat A,Mat B,PetscReal fill,Mat C)
{

  MatSetSizes(C,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(C,MATMPIDENSE);
  MatSetUp(C);
  C->ops->matmultnumeric = MatMatMultNumeric_Elemental_MPIDense;
  return(0);
}

/* Install the symbolic callbacks for the Elemental*MPIDense AB product on C */
PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense_AB(Mat C)
{
  C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental_MPIDense;
  C->ops->productsymbolic = MatProductSymbolic_AB;
  return(0);
}

426: PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense(Mat C)
427: {
429:   Mat_Product    *product = C->product;

432:   if (product->type == MATPRODUCT_AB) {
433:     MatProductSetFromOptions_Elemental_MPIDense_AB(C);
434:   }
435:   return(0);
436: }
437: /* --------------------------------------- */

439: static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
440: {
441:   PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
442:   Mat_Elemental   *a = (Mat_Elemental*)A->data;
443:   PetscErrorCode  ierr;
444:   PetscElemScalar v;
445:   MPI_Comm        comm;

448:   PetscObjectGetComm((PetscObject)A,&comm);
449:   MatGetSize(A,&nrows,&ncols);
450:   nD = nrows>ncols ? ncols : nrows;
451:   for (i=0; i<nD; i++) {
452:     PetscInt erow,ecol;
453:     P2RO(A,0,i,&rrank,&ridx);
454:     RO2E(A,0,rrank,ridx,&erow);
455:     if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
456:     P2RO(A,1,i,&crank,&cidx);
457:     RO2E(A,1,crank,cidx,&ecol);
458:     if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
459:     v = a->emat->Get(erow,ecol);
460:     VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
461:   }
462:   VecAssemblyBegin(D);
463:   VecAssemblyEnd(D);
464:   return(0);
465: }

467: static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
468: {
469:   Mat_Elemental         *x = (Mat_Elemental*)X->data;
470:   const PetscElemScalar *d;
471:   PetscErrorCode        ierr;

474:   if (R) {
475:     VecGetArrayRead(R,(const PetscScalar **)&d);
476:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
477:     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
478:     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
479:     VecRestoreArrayRead(R,(const PetscScalar **)&d);
480:   }
481:   if (L) {
482:     VecGetArrayRead(L,(const PetscScalar **)&d);
483:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
484:     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
485:     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
486:     VecRestoreArrayRead(L,(const PetscScalar **)&d);
487:   }
488:   return(0);
489: }

/*
   MatMissingDiagonal_Elemental - Dense storage always holds every diagonal
   entry, so nothing is ever missing; the optional index output d is not set.
*/
static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  *missing = PETSC_FALSE;
  return(0);
}

498: static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
499: {
500:   Mat_Elemental  *x = (Mat_Elemental*)X->data;

503:   El::Scale((PetscElemScalar)a,*x->emat);
504:   return(0);
505: }

/*
  MatAXPY - Computes Y = a*X + Y.

  The MatStructure hint str is irrelevant for dense Elemental storage.
*/
static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;
  Mat_Elemental  *y = (Mat_Elemental*)Y->data;

  El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
  PetscObjectStateIncrease((PetscObject)Y); /* tell PETSc that Y has changed */
  return(0);
}

522: static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
523: {
524:   Mat_Elemental *a=(Mat_Elemental*)A->data;
525:   Mat_Elemental *b=(Mat_Elemental*)B->data;

529:   El::Copy(*a->emat,*b->emat);
530:   PetscObjectStateIncrease((PetscObject)B);
531:   return(0);
532: }

/*
   MatDuplicate_Elemental - Creates a new Elemental matrix with the same layout
   as A; values are copied only when op == MAT_COPY_VALUES.
*/
static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
{
  Mat            Be;
  MPI_Comm       comm;
  Mat_Elemental  *a=(Mat_Elemental*)A->data;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Be);
  MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Be,MATELEMENTAL);
  MatSetUp(Be);
  *B = Be;
  if (op == MAT_COPY_VALUES) {
    Mat_Elemental *b=(Mat_Elemental*)Be->data;
    El::Copy(*a->emat,*b->emat);
  }
  Be->assembled = PETSC_TRUE;
  return(0);
}

/*
   MatTranspose_Elemental - Computes *B = A^T.

   MAT_INITIAL_MATRIX creates the (cmap x rmap)-sized result; MAT_REUSE_MATRIX
   writes into the existing *B; MAT_INPLACE_MATRIX is not supported.
*/
static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}

580: static PetscErrorCode MatConjugate_Elemental(Mat A)
581: {
582:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

585:   El::Conjugate(*a->emat);
586:   return(0);
587: }

589: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
590: {
591:   Mat            Be = *B;
593:   MPI_Comm       comm;
594:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

597:   PetscObjectGetComm((PetscObject)A,&comm);
598:   /* Only out-of-place supported */
599:   if (reuse == MAT_INITIAL_MATRIX) {
600:     MatCreate(comm,&Be);
601:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
602:     MatSetType(Be,MATELEMENTAL);
603:     MatSetUp(Be);
604:     *B = Be;
605:   }
606:   b = (Mat_Elemental*)Be->data;
607:   El::Adjoint(*a->emat,*b->emat);
608:   Be->assembled = PETSC_TRUE;
609:   return(0);
610: }

612: static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
613: {
614:   Mat_Elemental     *a = (Mat_Elemental*)A->data;
615:   PetscErrorCode    ierr;
616:   PetscElemScalar   *x;
617:   PetscInt          pivoting = a->pivoting;

620:   VecCopy(B,X);
621:   VecGetArray(X,(PetscScalar **)&x);

623:   El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
624:   xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
625:   El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
626:   switch (A->factortype) {
627:   case MAT_FACTOR_LU:
628:     if (pivoting == 0) {
629:       El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
630:     } else if (pivoting == 1) {
631:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
632:     } else { /* pivoting == 2 */
633:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
634:     }
635:     break;
636:   case MAT_FACTOR_CHOLESKY:
637:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
638:     break;
639:   default:
640:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
641:     break;
642:   }
643:   El::Copy(xer,xe);

645:   VecRestoreArray(X,(PetscScalar **)&x);
646:   return(0);
647: }

649: static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
650: {
651:   PetscErrorCode    ierr;

654:   MatSolve_Elemental(A,B,X);
655:   VecAXPY(X,1,Y);
656:   return(0);
657: }

/*
   MatMatSolve_Elemental - Solves A*X = B for multiple right-hand sides using
   A's LU/Cholesky factors.

   If X is not MATELEMENTAL, B is converted into a temporary Elemental matrix
   C, the solve overwrites C, and C is converted back into X; otherwise B is
   copied into X and the solve runs in place on X.
*/
static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  Mat_Elemental  *x;
  Mat            C;
  PetscInt       pivoting = a->pivoting;
  PetscBool      flg;
  MatType        type;

  MatGetType(X,&type);
  PetscStrcmp(type,MATELEMENTAL,&flg);
  if (!flg) {
    /* X is some other type: stage the RHS in a temporary Elemental matrix */
    MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&C);
    x = (Mat_Elemental*)C->data;
  } else {
    x = (Mat_Elemental*)X->data;
    El::Copy(*((Mat_Elemental*)B->data)->emat,*x->emat);
  }
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  if (!flg) {
    /* move the solution from the temporary back into the caller's X */
    MatConvert(C,type,MAT_REUSE_MATRIX,&X);
    MatDestroy(&C);
  }
  return(0);
}

/*
   MatLUFactor_Elemental - In-place LU factorization of A; the row/col ISs are
   ignored.  a->pivoting selects: 0 = none, 1 = partial (P), 2 = full (P and Q).
*/
static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       pivoting = a->pivoting;

  if (pivoting == 0) {
    El::LU(*a->emat);
  } else if (pivoting == 1) {
    El::LU(*a->emat,*a->P);
  } else {
    El::LU(*a->emat,*a->P,*a->Q);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

/* Numeric LU: copy A into the factor matrix F and factor F in place. */
static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{

  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatLUFactor_Elemental(F,0,0,info);
  return(0);
}

/* Symbolic LU: intentionally a no-op for dense Elemental factors. */
static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  return(0);
}

742: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
743: {
744:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
745:   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;

749:   El::Cholesky(El::UPPER,*a->emat);
750:   A->factortype = MAT_FACTOR_CHOLESKY;
751:   A->assembled  = PETSC_TRUE;

753:   PetscFree(A->solvertype);
754:   PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
755:   return(0);
756: }

/* Numeric Cholesky: copy A into the factor matrix F and factor F in place. */
static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{

  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatCholeskyFactor_Elemental(F,0,info);
  return(0);
}

/* Symbolic Cholesky: intentionally a no-op for dense Elemental factors. */
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  return(0);
}

/* Reports the solver package name for factors produced by this file. */
PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  *type = MATSOLVERELEMENTAL;
  return(0);
}

/*
   MatGetFactor_elemental_elemental - Creates the factor matrix F (same layout
   as A) used by the symbolic/numeric factorization routines above.
*/
static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(B,MATELEMENTAL);
  MatSetUp(B);
  B->factortype = ftype;
  B->trivialsymbolic = PETSC_TRUE; /* symbolic phase is a no-op for dense factors */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
  *F            = B;
  return(0);
}

/* Registers the Elemental solver package for LU and Cholesky on MATELEMENTAL. */
PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
{

  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
  return(0);
}

813: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
814: {
815:   Mat_Elemental *a=(Mat_Elemental*)A->data;

818:   switch (type) {
819:   case NORM_1:
820:     *nrm = El::OneNorm(*a->emat);
821:     break;
822:   case NORM_FROBENIUS:
823:     *nrm = El::FrobeniusNorm(*a->emat);
824:     break;
825:   case NORM_INFINITY:
826:     *nrm = El::InfinityNorm(*a->emat);
827:     break;
828:   default:
829:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
830:   }
831:   return(0);
832: }

834: static PetscErrorCode MatZeroEntries_Elemental(Mat A)
835: {
836:   Mat_Elemental *a=(Mat_Elemental*)A->data;

839:   El::Zero(*a->emat);
840:   return(0);
841: }

/*
   MatGetOwnershipIS_Elemental - Builds index sets containing the global PETSc
   row/column indices whose entries are stored locally by this rank.

   For each locally held Elemental index (shift + i*stride), the index is
   translated back to PETSc ordering via (rank,offset).  Either output may be
   NULL to skip that direction; the ISs take ownership of the malloc'd arrays.
*/
static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,m,shift,stride,*idx;

  if (rows) {
    m = a->emat->LocalHeight();
    shift = a->emat->ColShift();
    stride = a->emat->ColStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
  }
  if (cols) {
    m = a->emat->LocalWidth();
    shift = a->emat->RowShift();
    stride = a->emat->RowStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
  }
  return(0);
}

/*
   MatConvert_Elemental_Dense - Converts an Elemental matrix to MATDENSE.

   Each rank walks its locally owned (row,col) pairs (from MatGetOwnershipIS),
   translates them to Elemental indices, reads the entry from local storage,
   and inserts it into the dense matrix, which is then assembled.
*/
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscObjectGetComm((PetscObject)A,&comm);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    MatCreate(comm,&Bmpi);
    MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Bmpi,MATDENSE);
    MatSetUp(Bmpi);
  }

  /* Get local entries of A */
  MatGetOwnershipIS(A,&isrows,&iscols);
  ISGetLocalSize(isrows,&nrows);
  ISGetIndices(isrows,&rows);
  ISGetLocalSize(iscols,&ncols);
  ISGetIndices(iscols,&cols);

  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&Bmpi); /* A itself becomes the dense matrix */
  } else {
    *B = Bmpi;
  }
  ISDestroy(&isrows);
  ISDestroy(&iscols);
  return(0);
}

953: PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
954: {
955:   Mat               mat_elemental;
956:   PetscErrorCode    ierr;
957:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
958:   const PetscInt    *cols;
959:   const PetscScalar *vals;

962:   if (reuse == MAT_REUSE_MATRIX) {
963:     mat_elemental = *newmat;
964:     MatZeroEntries(mat_elemental);
965:   } else {
966:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
967:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
968:     MatSetType(mat_elemental,MATELEMENTAL);
969:     MatSetUp(mat_elemental);
970:   }
971:   for (row=0; row<M; row++) {
972:     MatGetRow(A,row,&ncols,&cols,&vals);
973:     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
974:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
975:     MatRestoreRow(A,row,&ncols,&cols,&vals);
976:   }
977:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
978:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

980:   if (reuse == MAT_INPLACE_MATRIX) {
981:     MatHeaderReplace(A,&mat_elemental);
982:   } else {
983:     *newmat = mat_elemental;
984:   }
985:   return(0);
986: }

988: PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
989: {
990:   Mat               mat_elemental;
991:   PetscErrorCode    ierr;
992:   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
993:   const PetscInt    *cols;
994:   const PetscScalar *vals;

997:   if (reuse == MAT_REUSE_MATRIX) {
998:     mat_elemental = *newmat;
999:     MatZeroEntries(mat_elemental);
1000:   } else {
1001:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1002:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
1003:     MatSetType(mat_elemental,MATELEMENTAL);
1004:     MatSetUp(mat_elemental);
1005:   }
1006:   for (row=rstart; row<rend; row++) {
1007:     MatGetRow(A,row,&ncols,&cols,&vals);
1008:     for (j=0; j<ncols; j++) {
1009:       /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1010:       MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
1011:     }
1012:     MatRestoreRow(A,row,&ncols,&cols,&vals);
1013:   }
1014:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1015:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

1017:   if (reuse == MAT_INPLACE_MATRIX) {
1018:     MatHeaderReplace(A,&mat_elemental);
1019:   } else {
1020:     *newmat = mat_elemental;
1021:   }
1022:   return(0);
1023: }

/* Convert a sequential SBAIJ matrix (symmetric/Hermitian, upper triangle
   stored) into a dense Elemental matrix, explicitly filling in the implicit
   lower triangle. */
PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);  /* make MatGetRow on SBAIJ return the stored upper-triangular part */
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;  /* diagonal entries must not be added a second time */
      /* mirror (row,cols[j]) into (cols[j],row); conjugate when the matrix is Hermitian */
      v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    /* replace A in place so the caller's handle now refers to the Elemental matrix */
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

1068: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1069: {
1070:   Mat               mat_elemental;
1071:   PetscErrorCode    ierr;
1072:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1073:   const PetscInt    *cols;
1074:   const PetscScalar *vals;

1077:   if (reuse == MAT_REUSE_MATRIX) {
1078:     mat_elemental = *newmat;
1079:     MatZeroEntries(mat_elemental);
1080:   } else {
1081:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1082:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1083:     MatSetType(mat_elemental,MATELEMENTAL);
1084:     MatSetUp(mat_elemental);
1085:   }
1086:   MatGetRowUpperTriangular(A);
1087:   for (row=rstart; row<rend; row++) {
1088:     MatGetRow(A,row,&ncols,&cols,&vals);
1089:     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1090:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1091:     for (j=0; j<ncols; j++) { /* lower triangular part */
1092:       PetscScalar v;
1093:       if (cols[j] == row) continue;
1094:       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
1095:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
1096:     }
1097:     MatRestoreRow(A,row,&ncols,&cols,&vals);
1098:   }
1099:   MatRestoreRowUpperTriangular(A);
1100:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1101:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

1103:   if (reuse == MAT_INPLACE_MATRIX) {
1104:     MatHeaderReplace(A,&mat_elemental);
1105:   } else {
1106:     *newmat = mat_elemental;
1107:   }
1108:   return(0);
1109: }

/* Destroy an Elemental matrix: free the distributed storage, drop this
   matrix's reference to the communicator-shared El::Grid, and remove the
   composed methods installed at creation time. */
static PetscErrorCode MatDestroy_Elemental(Mat A)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  PetscErrorCode     ierr;
  Mat_Elemental_Grid *commgrid;
  PetscBool          flg;
  MPI_Comm           icomm;

  delete a->emat;
  delete a->P;  /* NOTE(review): P and Q look like factorization permutation objects — confirm against the factor routines */
  delete a->Q;

  /* The El::Grid is shared by all Elemental matrices on this communicator via
     MPI attribute caching (set up in MatCreate_Elemental).  Decrement the
     reference count and tear the grid + keyval down with the last matrix. */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);  /* NOTE(review): flg is not checked; assumes the attribute was attached at creation */
  if (--commgrid->grid_refct == 0) {
    delete commgrid->grid;
    PetscFree(commgrid);
    MPI_Comm_free_keyval(&Petsc_Elemental_keyval);  /* MPI resets the keyval to MPI_KEYVAL_INVALID, so a later MatCreate_Elemental re-registers it */
  }
  PetscCommDestroy(&icomm);
  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",NULL);
  PetscFree(A->data);
  return(0);
}

/* Set up an Elemental matrix: finalize the PETSc layouts, verify that the
   local sizes match the equal (PetscSplitOwnership) distribution that the
   PETSc<->Elemental index translation assumes, and size/zero the Elemental
   distributed matrix. */
PetscErrorCode MatSetUp_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  MPI_Comm       comm;
  PetscMPIInt    rsize,csize;
  PetscInt       n;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* Check if local row and column sizes are equally distributed.
     Jed: Elemental uses "element" cyclic ordering so the sizes need to match that
     exactly.  The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
     PetscSplitOwnership(comm,&n,&N), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
  PetscObjectGetComm((PetscObject)A,&comm);
  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->rmap->N);
  if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);

  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->cmap->N);
  if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);

  /* allocate the global-size distributed matrix and clear it */
  a->emat->Resize(A->rmap->N,A->cmap->N);
  El::Zero(*a->emat);

  MPI_Comm_size(A->rmap->comm,&rsize);
  MPI_Comm_size(A->cmap->comm,&csize);
  if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
  a->commsize = rsize;
  /* Per-dimension block-distribution parameters: from the formulas below,
     mr[d] is the count of ranks holding the larger local block and m[d] the
     (ceiling) block size.  NOTE(review): inferred — confirm against the
     P2RO/RO2E index-translation helpers that consume these fields. */
  a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
  a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
  a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
  a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
  return(0);
}

1179: PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1180: {
1181:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

1184:   /* printf("Calling ProcessQueues\n"); */
1185:   a->emat->ProcessQueues();
1186:   /* printf("Finished ProcessQueues\n"); */
1187:   return(0);
1188: }

1190: PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
1191: {
1193:   /* Currently does nothing */
1194:   return(0);
1195: }

1197: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1198: {
1200:   Mat            Adense,Ae;
1201:   MPI_Comm       comm;

1204:   PetscObjectGetComm((PetscObject)newMat,&comm);
1205:   MatCreate(comm,&Adense);
1206:   MatSetType(Adense,MATDENSE);
1207:   MatLoad(Adense,viewer);
1208:   MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1209:   MatDestroy(&Adense);
1210:   MatHeaderReplace(newMat,&Ae);
1211:   return(0);
1212: }

1214: /* -------------------------------------------------------------------*/
/* Operation table for the MATELEMENTAL type.  Entries are positional: the
   slot index (the numbered comments) must match the ordering of struct
   _MatOps in PETSc's private matimpl header; a 0 entry marks an operation
   this type does not implement.  Do not reorder. */
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       0,
       0,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ MatProductSetFromOptions_Elemental,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0,
       0,
/*140*/0,
       0,
       0,
       0,
       0,
/*145*/0,
       0,
       0
};

1366: /*MC
1367:    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

1369:   Use ./configure --download-elemental to install PETSc to use Elemental

1371:   Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver

1373:    Options Database Keys:
1374: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1375: - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix

1377:   Level: beginner

1379: .seealso: MATDENSE
1380: M*/

1382: PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1383: {
1384:   Mat_Elemental      *a;
1385:   PetscErrorCode     ierr;
1386:   PetscBool          flg,flg1;
1387:   Mat_Elemental_Grid *commgrid;
1388:   MPI_Comm           icomm;
1389:   PetscInt           optv1;

1392:   PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
1393:   A->insertmode = NOT_SET_VALUES;

1395:   PetscNewLog(A,&a);
1396:   A->data = (void*)a;

1398:   /* Set up the elemental matrix */
1399:   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

1401:   /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1402:   if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1403:     MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
1404:     PetscCitationsRegister(ElementalCitation,&ElementalCite);
1405:   }
1406:   PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1407:   MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1408:   if (!flg) {
1409:     PetscNewLog(A,&commgrid);

1411:     PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
1412:     /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
1413:     PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
1414:     if (flg1) {
1415:       if (El::mpi::Size(cxxcomm) % optv1) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1416:       commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1417:     } else {
1418:       commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1419:       /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
1420:     }
1421:     commgrid->grid_refct = 1;
1422:     MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

1424:     a->pivoting    = 1;
1425:     PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

1427:     PetscOptionsEnd();
1428:   } else {
1429:     commgrid->grid_refct++;
1430:   }
1431:   PetscCommDestroy(&icomm);
1432:   a->grid        = commgrid->grid;
1433:   a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
1434:   a->roworiented = PETSC_TRUE;

1436:   PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
1437:   PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",MatProductSetFromOptions_Elemental_MPIDense);
1438:   PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
1439:   return(0);
1440: }