Actual source code: matelem.cxx

petsc-3.14.6 2021-03-30
  1: #include <petsc/private/petscelemental.h>

  3: /*
  4:     The variable Petsc_Elemental_keyval identifies an MPI attribute attached to a
  5:   communicator; here the attribute's value is a Mat_Elemental_Grid
  6: */
  7: static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
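
The keyval implements the standard MPI attribute-caching idiom: the first Mat created on a communicator builds an Elemental grid and caches it on that communicator, and later Mats look it up and share it. A minimal sketch of the lookup-or-create pattern, assuming a duplicated communicator comm (the real logic lives in MatCreate_Elemental() and MatDestroy_Elemental() below):

    Mat_Elemental_Grid *commgrid;
    int                 found;

    if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
      MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
    }
    MPI_Comm_get_attr(comm,Petsc_Elemental_keyval,(void**)&commgrid,&found);
    if (!found) {                   /* first matrix on comm: create the grid and cache it */
      /* ... allocate commgrid and its El::Grid, set commgrid->grid_refct = 1 ... */
      MPI_Comm_set_attr(comm,Petsc_Elemental_keyval,(void*)commgrid);
    } else {
      commgrid->grid_refct++;       /* share the cached grid */
    }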

  9: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
 10: {
 12:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
 13:   PetscBool      iascii;

 16:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 17:   if (iascii) {
 18:     PetscViewerFormat format;
 19:     PetscViewerGetFormat(viewer,&format);
 20:     if (format == PETSC_VIEWER_ASCII_INFO) {
 21:       /* print the Elemental run-time parameters */
 22:       PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
 23:       PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());
 24:       PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
 25:       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
 26:         /* unreachable: format cannot equal both ASCII_INFO and ASCII_FACTOR_INFO; placeholder output */
 27:         PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
 28:       }

 30:     } else if (format == PETSC_VIEWER_DEFAULT) {
 31:       PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
 32:       El::Print( *a->emat, "Elemental matrix (cyclic ordering)");
 33:       PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
 34:       if (A->factortype == MAT_FACTOR_NONE){
 35:         Mat Adense;
 36:         MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 37:         MatView(Adense,viewer);
 38:         MatDestroy(&Adense);
 39:       }
 40:     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Unsupported viewer format");
 41:   } else {
 42:     /* convert to dense format and call MatView() */
 43:     Mat Adense;
 44:     MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
 45:     MatView(Adense,viewer);
 46:     MatDestroy(&Adense);
 47:   }
 48:   return(0);
 49: }

 51: static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
 52: {
 53:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

 56:   info->block_size = 1.0;

 58:   if (flag == MAT_LOCAL) {
 59:     info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
 60:     info->nz_used        = info->nz_allocated;
 61:   } else if (flag == MAT_GLOBAL_MAX) {
 62:     //MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));
 63:     /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
 64:     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
 65:   } else if (flag == MAT_GLOBAL_SUM) {
 66:     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
 67:     info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
 68:     info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
 69:     //MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
 70:     //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
 71:   }

 73:   info->nz_unneeded       = 0.0;
 74:   info->assemblies        = A->num_ass;
 75:   info->mallocs           = 0;
 76:   info->memory            = ((PetscObject)A)->mem;
 77:   info->fill_ratio_given  = 0; /* determined by Elemental */
 78:   info->fill_ratio_needed = 0;
 79:   info->factor_mallocs    = 0;
 80:   return(0);
 81: }

 83: PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
 84: {
 85:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

 88:   switch (op) {
 89:   case MAT_NEW_NONZERO_LOCATIONS:
 90:   case MAT_NEW_NONZERO_LOCATION_ERR:
 91:   case MAT_NEW_NONZERO_ALLOCATION_ERR:
 92:   case MAT_SYMMETRIC:
 93:   case MAT_SORTED_FULL:
 94:   case MAT_HERMITIAN:
 95:     break;
 96:   case MAT_ROW_ORIENTED:
 97:     a->roworiented = flg;
 98:     break;
 99:   default:
100:     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
101:   }
102:   return(0);
103: }

105: static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
106: {
107:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
108:   PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

111:   // TODO: Initialize matrix to all zeros?

113:   // Count the number of queues from this process
114:   if (a->roworiented) {
115:     for (i=0; i<nr; i++) {
116:       if (rows[i] < 0) continue;
117:       P2RO(A,0,rows[i],&rrank,&ridx);
118:       RO2E(A,0,rrank,ridx,&erow);
119:       if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
120:       for (j=0; j<nc; j++) {
121:         if (cols[j] < 0) continue;
122:         P2RO(A,1,cols[j],&crank,&cidx);
123:         RO2E(A,1,crank,cidx,&ecol);
124:         if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
125:         if (!a->emat->IsLocal(erow,ecol)){ /* off-proc entry */
126:           /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
127:           if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
128:           ++numQueues;
129:           continue;
130:         }
131:         /* printf("Locally updating (%d,%d)\n",erow,ecol); */
132:         switch (imode) {
133:         case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
134:         case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
135:         default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
136:         }
137:       }
138:     }

140:     /* printf("numQueues=%d\n",numQueues); */
141:     a->emat->Reserve(numQueues);
142:     for (i=0; i<nr; i++) {
143:       if (rows[i] < 0) continue;
144:       P2RO(A,0,rows[i],&rrank,&ridx);
145:       RO2E(A,0,rrank,ridx,&erow);
146:       for (j=0; j<nc; j++) {
147:         if (cols[j] < 0) continue;
148:         P2RO(A,1,cols[j],&crank,&cidx);
149:         RO2E(A,1,crank,cidx,&ecol);
150:         if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
151:           /* printf("Queueing remote update of (%d,%d)\n",erow,ecol); */
152:           a->emat->QueueUpdate(erow,ecol,vals[i*nc+j]);
153:         }
154:       }
155:     }
156:   } else { /* column-oriented */
157:     for (j=0; j<nc; j++) {
158:       if (cols[j] < 0) continue;
159:       P2RO(A,1,cols[j],&crank,&cidx);
160:       RO2E(A,1,crank,cidx,&ecol);
161:       if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
162:       for (i=0; i<nr; i++) {
163:         if (rows[i] < 0) continue;
164:         P2RO(A,0,rows[i],&rrank,&ridx);
165:         RO2E(A,0,rrank,ridx,&erow);
166:         if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
167:         if (!a->emat->IsLocal(erow,ecol)){ /* off-proc entry */
168:           /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
169:           if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
170:           ++numQueues;
171:           continue;
172:         }
173:         /* printf("Locally updating (%d,%d)\n",erow,ecol); */
174:         switch (imode) {
175:         case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
176:         case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
177:         default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
178:         }
179:       }
180:     }

182:     /* printf("numQueues=%d\n",numQueues); */
183:     a->emat->Reserve(numQueues);
184:     for (j=0; j<nc; j++) {
185:       if (cols[j] < 0) continue;
186:       P2RO(A,1,cols[j],&crank,&cidx);
187:       RO2E(A,1,crank,cidx,&ecol);

189:       for (i=0; i<nr; i++) {
190:         if (rows[i] < 0) continue;
191:         P2RO(A,0,rows[i],&rrank,&ridx);
192:         RO2E(A,0,rrank,ridx,&erow);
193:         if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
194:           /* printf("Queueing remote update of (%d,%d)\n",erow,ecol); */
195:           a->emat->QueueUpdate(erow,ecol,vals[i+j*nr]);
196:         }
197:       }
198:     }
199:   }
200:   return(0);
201: }
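
A usage sketch, not part of matelem.cxx: because off-process entries are queued with QueueUpdate() and flushed by ProcessQueues() at assembly, only ADD_VALUES may target entries owned by another process (the 10x10 size is assumed for illustration):

    Mat         A;
    PetscInt    i = 0, j = 3;
    PetscScalar v = 1.0;

    MatCreate(PETSC_COMM_WORLD,&A);
    MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,10,10);
    MatSetType(A,MATELEMENTAL);
    MatSetUp(A);
    MatSetValues(A,1,&i,1,&j,&v,ADD_VALUES);  /* ADD_VALUES is valid for any owner */
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);   /* flushes the queued off-process updates */
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
    MatDestroy(&A);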

203: static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
204: {
205:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
206:   PetscErrorCode        ierr;
207:   const PetscElemScalar *x;
208:   PetscElemScalar       *y;
209:   PetscElemScalar       one = 1,zero = 0;

212:   VecGetArrayRead(X,(const PetscScalar **)&x);
213:   VecGetArray(Y,(PetscScalar **)&y);
214:   { /* Scope so that the El::DistMatrix destructors run before the Vec arrays are restored */
215:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
216:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
217:     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
218:     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
219:   }
220:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
221:   VecRestoreArray(Y,(PetscScalar **)&y);
222:   return(0);
223: }

225: static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
226: {
227:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
228:   PetscErrorCode        ierr;
229:   const PetscElemScalar *x;
230:   PetscElemScalar       *y;
231:   PetscElemScalar       one = 1,zero = 0;

234:   VecGetArrayRead(X,(const PetscScalar **)&x);
235:   VecGetArray(Y,(PetscScalar **)&y);
236:   { /* Scope so that the El::DistMatrix destructors run before the Vec arrays are restored */
237:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
238:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
239:     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
240:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
241:   }
242:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
243:   VecRestoreArray(Y,(PetscScalar **)&y);
244:   return(0);
245: }

247: static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
248: {
249:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
250:   PetscErrorCode        ierr;
251:   const PetscElemScalar *x;
252:   PetscElemScalar       *z;
253:   PetscElemScalar       one = 1;

256:   if (Y != Z) {VecCopy(Y,Z);}
257:   VecGetArrayRead(X,(const PetscScalar **)&x);
258:   VecGetArray(Z,(PetscScalar **)&z);
259:   { /* Scope so that the El::DistMatrix destructors run before the Vec arrays are restored */
260:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
261:     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
262:     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
263:     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
264:   }
265:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
266:   VecRestoreArray(Z,(PetscScalar **)&z);
267:   return(0);
268: }

270: static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
271: {
272:   Mat_Elemental         *a = (Mat_Elemental*)A->data;
273:   PetscErrorCode        ierr;
274:   const PetscElemScalar *x;
275:   PetscElemScalar       *z;
276:   PetscElemScalar       one = 1;

279:   if (Y != Z) {VecCopy(Y,Z);}
280:   VecGetArrayRead(X,(const PetscScalar **)&x);
281:   VecGetArray(Z,(PetscScalar **)&z);
282:   { /* Scope so that the El::DistMatrix destructors run before the Vec arrays are restored */
283:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
284:     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
285:     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
286:     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
287:   }
288:   VecRestoreArrayRead(X,(const PetscScalar **)&x);
289:   VecRestoreArray(Z,(PetscScalar **)&z);
290:   return(0);
291: }

293: PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
294: {
295:   Mat_Elemental    *a = (Mat_Elemental*)A->data;
296:   Mat_Elemental    *b = (Mat_Elemental*)B->data;
297:   Mat_Elemental    *c = (Mat_Elemental*)C->data;
298:   PetscElemScalar  one = 1,zero = 0;

301:   { /* no Vec arrays are attached here, so the scope is not needed for destructor ordering */
302:     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
303:   }
304:   C->assembled = PETSC_TRUE;
305:   return(0);
306: }

308: PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
309: {

313:   MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
314:   MatSetType(Ce,MATELEMENTAL);
315:   MatSetUp(Ce);
316:   Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
317:   return(0);
318: }

320: static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
321: {
322:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
323:   Mat_Elemental      *b = (Mat_Elemental*)B->data;
324:   Mat_Elemental      *c = (Mat_Elemental*)C->data;
325:   PetscElemScalar    one = 1,zero = 0;

328:   { /* no Vec arrays are attached here, so the scope is not needed for destructor ordering */
329:     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
330:   }
331:   C->assembled = PETSC_TRUE;
332:   return(0);
333: }

335: static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
336: {

340:   MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
341:   MatSetType(C,MATELEMENTAL);
342:   MatSetUp(C);
343:   return(0);
344: }

346: /* --------------------------------------- */
347: static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
348: {
350:   C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
351:   C->ops->productsymbolic = MatProductSymbolic_AB;
352:   return(0);
353: }

355: static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
356: {
358:   C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
359:   C->ops->productsymbolic          = MatProductSymbolic_ABt;
360:   return(0);
361: }

363: PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
364: {
366:   Mat_Product    *product = C->product;

369:   switch (product->type) {
370:   case MATPRODUCT_AB:
371:     MatProductSetFromOptions_Elemental_AB(C);
372:     break;
373:   case MATPRODUCT_ABt:
374:     MatProductSetFromOptions_Elemental_ABt(C);
375:     break;
376:   default:
377:     break;
378:   }
379:   return(0);
380: }

382: PetscErrorCode MatMatMultNumeric_Elemental_MPIDense(Mat A,Mat B,Mat C)
383: {
384:   Mat            Be,Ce;

388:   MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&Be);
389:   MatMatMult(A,Be,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Ce);
390:   MatConvert(Ce,MATMPIDENSE,MAT_REUSE_MATRIX,&C);
391:   MatDestroy(&Be);
392:   MatDestroy(&Ce);
393:   return(0);
394: }

396: PetscErrorCode MatMatMultSymbolic_Elemental_MPIDense(Mat A,Mat B,PetscReal fill,Mat C)
397: {

401:   MatSetSizes(C,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
402:   MatSetType(C,MATMPIDENSE);
403:   MatSetUp(C);
404:   C->ops->matmultnumeric = MatMatMultNumeric_Elemental_MPIDense;
405:   return(0);
406: }

408: PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense_AB(Mat C)
409: {
411:   C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental_MPIDense;
412:   C->ops->productsymbolic = MatProductSymbolic_AB;
413:   return(0);
414: }

416: PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense(Mat C)
417: {
419:   Mat_Product    *product = C->product;

422:   if (product->type == MATPRODUCT_AB) {
423:     MatProductSetFromOptions_Elemental_MPIDense_AB(C);
424:   }
425:   return(0);
426: }
427: /* --------------------------------------- */

429: static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
430: {
431:   PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
432:   Mat_Elemental   *a = (Mat_Elemental*)A->data;
433:   PetscErrorCode  ierr;
434:   PetscElemScalar v;
435:   MPI_Comm        comm;

438:   PetscObjectGetComm((PetscObject)A,&comm);
439:   MatGetSize(A,&nrows,&ncols);
440:   nD = nrows>ncols ? ncols : nrows;
441:   for (i=0; i<nD; i++) {
442:     PetscInt erow,ecol;
443:     P2RO(A,0,i,&rrank,&ridx);
444:     RO2E(A,0,rrank,ridx,&erow);
445:     if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
446:     P2RO(A,1,i,&crank,&cidx);
447:     RO2E(A,1,crank,cidx,&ecol);
448:     if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
449:     v = a->emat->Get(erow,ecol);
450:     VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
451:   }
452:   VecAssemblyBegin(D);
453:   VecAssemblyEnd(D);
454:   return(0);
455: }

457: static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
458: {
459:   Mat_Elemental         *x = (Mat_Elemental*)X->data;
460:   const PetscElemScalar *d;
461:   PetscErrorCode        ierr;

464:   if (R) {
465:     VecGetArrayRead(R,(const PetscScalar **)&d);
466:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
467:     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
468:     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
469:     VecRestoreArrayRead(R,(const PetscScalar **)&d);
470:   }
471:   if (L) {
472:     VecGetArrayRead(L,(const PetscScalar **)&d);
473:     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
474:     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
475:     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
476:     VecRestoreArrayRead(L,(const PetscScalar **)&d);
477:   }
478:   return(0);
479: }

481: static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
482: {
484:   *missing = PETSC_FALSE;
485:   return(0);
486: }

488: static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
489: {
490:   Mat_Elemental  *x = (Mat_Elemental*)X->data;

493:   El::Scale((PetscElemScalar)a,*x->emat);
494:   return(0);
495: }

497: /*
498:   MatAXPY - Computes Y = a*X + Y.
499: */
500: static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
501: {
502:   Mat_Elemental  *x = (Mat_Elemental*)X->data;
503:   Mat_Elemental  *y = (Mat_Elemental*)Y->data;

507:   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
508:   PetscObjectStateIncrease((PetscObject)Y);
509:   return(0);
510: }

512: static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
513: {
514:   Mat_Elemental *a=(Mat_Elemental*)A->data;
515:   Mat_Elemental *b=(Mat_Elemental*)B->data;

519:   El::Copy(*a->emat,*b->emat);
520:   PetscObjectStateIncrease((PetscObject)B);
521:   return(0);
522: }

524: static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
525: {
526:   Mat            Be;
527:   MPI_Comm       comm;
528:   Mat_Elemental  *a=(Mat_Elemental*)A->data;

532:   PetscObjectGetComm((PetscObject)A,&comm);
533:   MatCreate(comm,&Be);
534:   MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
535:   MatSetType(Be,MATELEMENTAL);
536:   MatSetUp(Be);
537:   *B = Be;
538:   if (op == MAT_COPY_VALUES) {
539:     Mat_Elemental *b=(Mat_Elemental*)Be->data;
540:     El::Copy(*a->emat,*b->emat);
541:   }
542:   Be->assembled = PETSC_TRUE;
543:   return(0);
544: }

546: static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
547: {
548:   Mat            Be = *B;
550:   MPI_Comm       comm;
551:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

554:   PetscObjectGetComm((PetscObject)A,&comm);
555:   /* Only out-of-place supported */
556:   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
557:   if (reuse == MAT_INITIAL_MATRIX) {
558:     MatCreate(comm,&Be);
559:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
560:     MatSetType(Be,MATELEMENTAL);
561:     MatSetUp(Be);
562:     *B = Be;
563:   }
564:   b = (Mat_Elemental*)Be->data;
565:   El::Transpose(*a->emat,*b->emat);
566:   Be->assembled = PETSC_TRUE;
567:   return(0);
568: }

570: static PetscErrorCode MatConjugate_Elemental(Mat A)
571: {
572:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

575:   El::Conjugate(*a->emat);
576:   return(0);
577: }

579: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
580: {
581:   Mat            Be = *B;
583:   MPI_Comm       comm;
584:   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

587:   PetscObjectGetComm((PetscObject)A,&comm);
588:   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
589:   if (reuse == MAT_INITIAL_MATRIX) {
590:     MatCreate(comm,&Be);
591:     MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
592:     MatSetType(Be,MATELEMENTAL);
593:     MatSetUp(Be);
594:     *B = Be;
595:   }
596:   b = (Mat_Elemental*)Be->data;
597:   El::Adjoint(*a->emat,*b->emat);
598:   Be->assembled = PETSC_TRUE;
599:   return(0);
600: }

602: static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
603: {
604:   Mat_Elemental     *a = (Mat_Elemental*)A->data;
605:   PetscErrorCode    ierr;
606:   PetscElemScalar   *x;
607:   PetscInt          pivoting = a->pivoting;

610:   VecCopy(B,X);
611:   VecGetArray(X,(PetscScalar **)&x);

613:   El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
614:   xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
615:   El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
616:   switch (A->factortype) {
617:   case MAT_FACTOR_LU:
618:     if (pivoting == 0) {
619:       El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
620:     } else if (pivoting == 1) {
621:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
622:     } else { /* pivoting == 2 */
623:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
624:     }
625:     break;
626:   case MAT_FACTOR_CHOLESKY:
627:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
628:     break;
629:   default:
630:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored matrix or unsupported MatFactorType");
631:     break;
632:   }
633:   El::Copy(xer,xe);

635:   VecRestoreArray(X,(PetscScalar **)&x);
636:   return(0);
637: }

639: static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
640: {
641:   PetscErrorCode    ierr;

644:   MatSolve_Elemental(A,B,X);
645:   VecAXPY(X,1,Y);
646:   return(0);
647: }

649: static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
650: {
651:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
652:   Mat_Elemental  *x;
653:   Mat            C;
654:   PetscInt       pivoting = a->pivoting;
655:   PetscBool      flg;
656:   MatType        type;

660:   MatGetType(X,&type);
661:   PetscStrcmp(type,MATELEMENTAL,&flg);
662:   if (!flg) {
663:     MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&C);
664:     x = (Mat_Elemental*)C->data;
665:   } else {
666:     x = (Mat_Elemental*)X->data;
667:     El::Copy(*((Mat_Elemental*)B->data)->emat,*x->emat);
668:   }
669:   switch (A->factortype) {
670:   case MAT_FACTOR_LU:
671:     if (pivoting == 0) {
672:       El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
673:     } else if (pivoting == 1) {
674:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
675:     } else {
676:       El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
677:     }
678:     break;
679:   case MAT_FACTOR_CHOLESKY:
680:     El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
681:     break;
682:   default:
683:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored matrix or unsupported MatFactorType");
684:     break;
685:   }
686:   if (!flg) {
687:     MatConvert(C,type,MAT_REUSE_MATRIX,&X);
688:     MatDestroy(&C);
689:   }
690:   return(0);
691: }

693: static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
694: {
695:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
697:   PetscInt       pivoting = a->pivoting;

700:   if (pivoting == 0) {
701:     El::LU(*a->emat);
702:   } else if (pivoting == 1) {
703:     El::LU(*a->emat,*a->P);
704:   } else {
705:     El::LU(*a->emat,*a->P,*a->Q);
706:   }
707:   A->factortype = MAT_FACTOR_LU;
708:   A->assembled  = PETSC_TRUE;

710:   PetscFree(A->solvertype);
711:   PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
712:   return(0);
713: }

715: static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
716: {

720:   MatCopy(A,F,SAME_NONZERO_PATTERN);
721:   MatLUFactor_Elemental(F,0,0,info);
722:   return(0);
723: }

725: static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
726: {
728:   /* F is created and allocated by MatGetFactor_elemental_elemental(), skip this routine. */
729:   return(0);
730: }

732: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
733: {
734:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
735:   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;

739:   El::Cholesky(El::UPPER,*a->emat);
740:   A->factortype = MAT_FACTOR_CHOLESKY;
741:   A->assembled  = PETSC_TRUE;

743:   PetscFree(A->solvertype);
744:   PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
745:   return(0);
746: }

748: static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
749: {

753:   MatCopy(A,F,SAME_NONZERO_PATTERN);
754:   MatCholeskyFactor_Elemental(F,0,info);
755:   return(0);
756: }

758: static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
759: {
761:   /* F is created and allocated by MatGetFactor_elemental_elemental(), skip this routine. */
762:   return(0);
763: }

765: PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
766: {
768:   *type = MATSOLVERELEMENTAL;
769:   return(0);
770: }

772: static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
773: {
774:   Mat            B;

778:   /* Create the factorization matrix */
779:   MatCreate(PetscObjectComm((PetscObject)A),&B);
780:   MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
781:   MatSetType(B,MATELEMENTAL);
782:   MatSetUp(B);
783:   B->factortype = ftype;
784:   PetscFree(B->solvertype);
785:   PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

787:   PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
788:   *F            = B;
789:   return(0);
790: }

792: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
793: {

797:   MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
798:   MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
799:   return(0);
800: }
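
Once registered, the solver is selected like any other direct solver: either with -pc_type lu -pc_factor_mat_solver_type elemental on the command line, or programmatically. A sketch assuming an existing KSP ksp operating on a MATELEMENTAL matrix:

    PC pc;

    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCLU);                               /* or PCCHOLESKY */
    PCFactorSetMatSolverType(pc,MATSOLVERELEMENTAL);  /* routes factorization to MatGetFactor_elemental_elemental() */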

802: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
803: {
804:   Mat_Elemental *a=(Mat_Elemental*)A->data;

807:   switch (type){
808:   case NORM_1:
809:     *nrm = El::OneNorm(*a->emat);
810:     break;
811:   case NORM_FROBENIUS:
812:     *nrm = El::FrobeniusNorm(*a->emat);
813:     break;
814:   case NORM_INFINITY:
815:     *nrm = El::InfinityNorm(*a->emat);
816:     break;
817:   default:
818:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
819:   }
820:   return(0);
821: }

823: static PetscErrorCode MatZeroEntries_Elemental(Mat A)
824: {
825:   Mat_Elemental *a=(Mat_Elemental*)A->data;

828:   El::Zero(*a->emat);
829:   return(0);
830: }

832: static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
833: {
834:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
836:   PetscInt       i,m,shift,stride,*idx;

839:   if (rows) {
840:     m = a->emat->LocalHeight();
841:     shift = a->emat->ColShift();
842:     stride = a->emat->ColStride();
843:     PetscMalloc1(m,&idx);
844:     for (i=0; i<m; i++) {
845:       PetscInt rank,offset;
846:       E2RO(A,0,shift+i*stride,&rank,&offset);
847:       RO2P(A,0,rank,offset,&idx[i]);
848:     }
849:     ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
850:   }
851:   if (cols) {
852:     m = a->emat->LocalWidth();
853:     shift = a->emat->RowShift();
854:     stride = a->emat->RowStride();
855:     PetscMalloc1(m,&idx);
856:     for (i=0; i<m; i++) {
857:       PetscInt rank,offset;
858:       E2RO(A,1,shift+i*stride,&rank,&offset);
859:       RO2P(A,1,rank,offset,&idx[i]);
860:     }
861:     ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
862:   }
863:   return(0);
864: }
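
A sketch of querying the cyclic layout through this composed interface, assuming an assembled MATELEMENTAL matrix A:

    IS             rows,cols;
    PetscInt       nlocal;
    const PetscInt *idx;

    MatGetOwnershipIS(A,&rows,&cols);
    ISGetLocalSize(rows,&nlocal);
    ISGetIndices(rows,&idx);        /* global PETSc indices of the rows stored on this process */
    /* ... inspect idx[0..nlocal) ... */
    ISRestoreIndices(rows,&idx);
    ISDestroy(&rows);
    ISDestroy(&cols);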

866: static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
867: {
868:   Mat                Bmpi;
869:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
870:   MPI_Comm           comm;
871:   PetscErrorCode     ierr;
872:   IS                 isrows,iscols;
873:   PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
874:   const PetscInt     *rows,*cols;
875:   PetscElemScalar    v;
876:   const El::Grid     &grid = a->emat->Grid();

879:   PetscObjectGetComm((PetscObject)A,&comm);

881:   if (reuse == MAT_REUSE_MATRIX) {
882:     Bmpi = *B;
883:   } else {
884:     MatCreate(comm,&Bmpi);
885:     MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
886:     MatSetType(Bmpi,MATDENSE);
887:     MatSetUp(Bmpi);
888:   }

890:   /* Get local entries of A */
891:   MatGetOwnershipIS(A,&isrows,&iscols);
892:   ISGetLocalSize(isrows,&nrows);
893:   ISGetIndices(isrows,&rows);
894:   ISGetLocalSize(iscols,&ncols);
895:   ISGetIndices(iscols,&cols);

897:   if (a->roworiented) {
898:     for (i=0; i<nrows; i++) {
899:       P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
900:       RO2E(A,0,rrank,ridx,&erow);
901:       if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
902:       for (j=0; j<ncols; j++) {
903:         P2RO(A,1,cols[j],&crank,&cidx);
904:         RO2E(A,1,crank,cidx,&ecol);
905:         if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

907:         elrow = erow / grid.MCSize(); /* Elemental local row index */
908:         elcol = ecol / grid.MRSize(); /* Elemental local column index */
909:         v = a->emat->GetLocal(elrow,elcol);
910:         MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
911:       }
912:     }
913:   } else { /* column-oriented */
914:     for (j=0; j<ncols; j++) {
915:       P2RO(A,1,cols[j],&crank,&cidx);
916:       RO2E(A,1,crank,cidx,&ecol);
917:       if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
918:       for (i=0; i<nrows; i++) {
919:         P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
920:         RO2E(A,0,rrank,ridx,&erow);
921:         if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

923:         elrow = erow / grid.MCSize(); /* Elemental local row index */
924:         elcol = ecol / grid.MRSize(); /* Elemental local column index */
925:         v = a->emat->GetLocal(elrow,elcol);
926:         MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
927:       }
928:     }
929:   }
930:   MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
931:   MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
932:   if (reuse == MAT_INPLACE_MATRIX) {
933:     MatHeaderReplace(A,&Bmpi);
934:   } else {
935:     *B = Bmpi;
936:   }
937:   ISDestroy(&isrows);
938:   ISDestroy(&iscols);
939:   return(0);
940: }

942: PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
943: {
944:   Mat               mat_elemental;
945:   PetscErrorCode    ierr;
946:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
947:   const PetscInt    *cols;
948:   const PetscScalar *vals;

951:   if (reuse == MAT_REUSE_MATRIX) {
952:     mat_elemental = *newmat;
953:     MatZeroEntries(mat_elemental);
954:   } else {
955:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
956:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
957:     MatSetType(mat_elemental,MATELEMENTAL);
958:     MatSetUp(mat_elemental);
959:   }
960:   for (row=0; row<M; row++) {
961:     MatGetRow(A,row,&ncols,&cols,&vals);
962:     /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
963:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
964:     MatRestoreRow(A,row,&ncols,&cols,&vals);
965:   }
966:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
967:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

969:   if (reuse == MAT_INPLACE_MATRIX) {
970:     MatHeaderReplace(A,&mat_elemental);
971:   } else {
972:     *newmat = mat_elemental;
973:   }
974:   return(0);
975: }

977: PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
978: {
979:   Mat               mat_elemental;
980:   PetscErrorCode    ierr;
981:   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
982:   const PetscInt    *cols;
983:   const PetscScalar *vals;

986:   if (reuse == MAT_REUSE_MATRIX) {
987:     mat_elemental = *newmat;
988:     MatZeroEntries(mat_elemental);
989:   } else {
990:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
991:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
992:     MatSetType(mat_elemental,MATELEMENTAL);
993:     MatSetUp(mat_elemental);
994:   }
995:   for (row=rstart; row<rend; row++) {
996:     MatGetRow(A,row,&ncols,&cols,&vals);
997:     for (j=0; j<ncols; j++) {
998:       /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
999:       MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
1000:     }
1001:     MatRestoreRow(A,row,&ncols,&cols,&vals);
1002:   }
1003:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1004:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

1006:   if (reuse == MAT_INPLACE_MATRIX) {
1007:     MatHeaderReplace(A,&mat_elemental);
1008:   } else {
1009:     *newmat = mat_elemental;
1010:   }
1011:   return(0);
1012: }
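
These converters are reached through MatConvert(); a sketch assuming an assembled MPIAIJ matrix A:

    Mat Ae;

    MatConvert(A,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae);  /* redistribute into Elemental's dense cyclic layout */
    /* ... dense operations on Ae ... */
    MatDestroy(&Ae);
    /* or, in place, replacing A's representation: MatConvert(A,MATELEMENTAL,MAT_INPLACE_MATRIX,&A); */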

1014: PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1015: {
1016:   Mat               mat_elemental;
1017:   PetscErrorCode    ierr;
1018:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
1019:   const PetscInt    *cols;
1020:   const PetscScalar *vals;

1023:   if (reuse == MAT_REUSE_MATRIX) {
1024:     mat_elemental = *newmat;
1025:     MatZeroEntries(mat_elemental);
1026:   } else {
1027:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1028:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1029:     MatSetType(mat_elemental,MATELEMENTAL);
1030:     MatSetUp(mat_elemental);
1031:   }
1032:   MatGetRowUpperTriangular(A);
1033:   for (row=0; row<M; row++) {
1034:     MatGetRow(A,row,&ncols,&cols,&vals);
1035:     /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
1036:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1037:     for (j=0; j<ncols; j++) { /* lower triangular part */
1038:       PetscScalar v;
1039:       if (cols[j] == row) continue;
1040:       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
1041:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
1042:     }
1043:     MatRestoreRow(A,row,&ncols,&cols,&vals);
1044:   }
1045:   MatRestoreRowUpperTriangular(A);
1046:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1047:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

1049:   if (reuse == MAT_INPLACE_MATRIX) {
1050:     MatHeaderReplace(A,&mat_elemental);
1051:   } else {
1052:     *newmat = mat_elemental;
1053:   }
1054:   return(0);
1055: }

1057: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1058: {
1059:   Mat               mat_elemental;
1060:   PetscErrorCode    ierr;
1061:   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1062:   const PetscInt    *cols;
1063:   const PetscScalar *vals;

1066:   if (reuse == MAT_REUSE_MATRIX) {
1067:     mat_elemental = *newmat;
1068:     MatZeroEntries(mat_elemental);
1069:   } else {
1070:     MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1071:     MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1072:     MatSetType(mat_elemental,MATELEMENTAL);
1073:     MatSetUp(mat_elemental);
1074:   }
1075:   MatGetRowUpperTriangular(A);
1076:   for (row=rstart; row<rend; row++) {
1077:     MatGetRow(A,row,&ncols,&cols,&vals);
1078:     /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
1079:     MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1080:     for (j=0; j<ncols; j++) { /* lower triangular part */
1081:       PetscScalar v;
1082:       if (cols[j] == row) continue;
1083:       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
1084:       MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
1085:     }
1086:     MatRestoreRow(A,row,&ncols,&cols,&vals);
1087:   }
1088:   MatRestoreRowUpperTriangular(A);
1089:   MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1090:   MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

1092:   if (reuse == MAT_INPLACE_MATRIX) {
1093:     MatHeaderReplace(A,&mat_elemental);
1094:   } else {
1095:     *newmat = mat_elemental;
1096:   }
1097:   return(0);
1098: }

1100: static PetscErrorCode MatDestroy_Elemental(Mat A)
1101: {
1102:   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1103:   PetscErrorCode     ierr;
1104:   Mat_Elemental_Grid *commgrid;
1105:   PetscBool          flg;
1106:   MPI_Comm           icomm;

1109:   delete a->emat;
1110:   delete a->P;
1111:   delete a->Q;

1113:   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1114:   PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1115:   MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1116:   if (--commgrid->grid_refct == 0) {
1117:     delete commgrid->grid;
1118:     PetscFree(commgrid);
1119:     MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
1120:   }
1121:   PetscCommDestroy(&icomm);
1122:   PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
1123:   PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
1124:   PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",NULL);
1125:   PetscFree(A->data);
1126:   return(0);
1127: }

1129: PetscErrorCode MatSetUp_Elemental(Mat A)
1130: {
1131:   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1133:   MPI_Comm       comm;
1134:   PetscMPIInt    rsize,csize;
1135:   PetscInt       n;

1138:   PetscLayoutSetUp(A->rmap);
1139:   PetscLayoutSetUp(A->cmap);

1141:   /* Check that the local row and column sizes are equally distributed (a sketch of choosing
1142:      compatible sizes follows this routine). Jed: Elemental uses "element" cyclic ordering, so the
1143:      sizes need to match that exactly. The strategy in MatElemental is for PETSc to implicitly permute to block ordering
1144:      (like that returned by PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices can act on PETSc vectors without redistributing them. */
1145:   PetscObjectGetComm((PetscObject)A,&comm);
1146:   n = PETSC_DECIDE;
1147:   PetscSplitOwnership(comm,&n,&A->rmap->N);
1148:   if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);

1150:   n = PETSC_DECIDE;
1151:   PetscSplitOwnership(comm,&n,&A->cmap->N);
1152:   if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);

1154:   a->emat->Resize(A->rmap->N,A->cmap->N);
1155:   El::Zero(*a->emat);

1157:   MPI_Comm_size(A->rmap->comm,&rsize);
1158:   MPI_Comm_size(A->cmap->comm,&csize);
1159:   if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1160:   a->commsize = rsize;
1161:   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1162:   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1163:   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1164:   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1165:   return(0);
1166: }
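
As the comment above notes, explicit local sizes are accepted only when they equal the PetscSplitOwnership() split. A sketch of choosing compatible local sizes for an N x N matrix A (N = 100 assumed for illustration):

    PetscInt n = PETSC_DECIDE, N = 100;

    PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);  /* the only local size MatSetUp_Elemental() accepts */
    MatSetSizes(A,n,n,N,N);                       /* equivalently, just pass PETSC_DECIDE for the local sizes */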

1168: PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1169: {
1170:   Mat_Elemental  *a = (Mat_Elemental*)A->data;

1173:   /* printf("Calling ProcessQueues\n"); */
1174:   a->emat->ProcessQueues();
1175:   /* printf("Finished ProcessQueues\n"); */
1176:   return(0);
1177: }

1179: PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
1180: {
1182:   /* Currently does nothing */
1183:   return(0);
1184: }

1186: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1187: {
1189:   Mat            Adense,Ae;
1190:   MPI_Comm       comm;

1193:   PetscObjectGetComm((PetscObject)newMat,&comm);
1194:   MatCreate(comm,&Adense);
1195:   MatSetType(Adense,MATDENSE);
1196:   MatLoad(Adense,viewer);
1197:   MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1198:   MatDestroy(&Adense);
1199:   MatHeaderReplace(newMat,&Ae);
1200:   return(0);
1201: }

1203: /* -------------------------------------------------------------------*/
1204: static struct _MatOps MatOps_Values = {
1205:        MatSetValues_Elemental,
1206:        0,
1207:        0,
1208:        MatMult_Elemental,
1209: /* 4*/ MatMultAdd_Elemental,
1210:        MatMultTranspose_Elemental,
1211:        MatMultTransposeAdd_Elemental,
1212:        MatSolve_Elemental,
1213:        MatSolveAdd_Elemental,
1214:        0,
1215: /*10*/ 0,
1216:        MatLUFactor_Elemental,
1217:        MatCholeskyFactor_Elemental,
1218:        0,
1219:        MatTranspose_Elemental,
1220: /*15*/ MatGetInfo_Elemental,
1221:        0,
1222:        MatGetDiagonal_Elemental,
1223:        MatDiagonalScale_Elemental,
1224:        MatNorm_Elemental,
1225: /*20*/ MatAssemblyBegin_Elemental,
1226:        MatAssemblyEnd_Elemental,
1227:        MatSetOption_Elemental,
1228:        MatZeroEntries_Elemental,
1229: /*24*/ 0,
1230:        MatLUFactorSymbolic_Elemental,
1231:        MatLUFactorNumeric_Elemental,
1232:        MatCholeskyFactorSymbolic_Elemental,
1233:        MatCholeskyFactorNumeric_Elemental,
1234: /*29*/ MatSetUp_Elemental,
1235:        0,
1236:        0,
1237:        0,
1238:        0,
1239: /*34*/ MatDuplicate_Elemental,
1240:        0,
1241:        0,
1242:        0,
1243:        0,
1244: /*39*/ MatAXPY_Elemental,
1245:        0,
1246:        0,
1247:        0,
1248:        MatCopy_Elemental,
1249: /*44*/ 0,
1250:        MatScale_Elemental,
1251:        MatShift_Basic,
1252:        0,
1253:        0,
1254: /*49*/ 0,
1255:        0,
1256:        0,
1257:        0,
1258:        0,
1259: /*54*/ 0,
1260:        0,
1261:        0,
1262:        0,
1263:        0,
1264: /*59*/ 0,
1265:        MatDestroy_Elemental,
1266:        MatView_Elemental,
1267:        0,
1268:        0,
1269: /*64*/ 0,
1270:        0,
1271:        0,
1272:        0,
1273:        0,
1274: /*69*/ 0,
1275:        0,
1276:        MatConvert_Elemental_Dense,
1277:        0,
1278:        0,
1279: /*74*/ 0,
1280:        0,
1281:        0,
1282:        0,
1283:        0,
1284: /*79*/ 0,
1285:        0,
1286:        0,
1287:        0,
1288:        MatLoad_Elemental,
1289: /*84*/ 0,
1290:        0,
1291:        0,
1292:        0,
1293:        0,
1294: /*89*/ 0,
1295:        0,
1296:        MatMatMultNumeric_Elemental,
1297:        0,
1298:        0,
1299: /*94*/ 0,
1300:        0,
1301:        0,
1302:        MatMatTransposeMultNumeric_Elemental,
1303:        0,
1304: /*99*/ MatProductSetFromOptions_Elemental,
1305:        0,
1306:        0,
1307:        MatConjugate_Elemental,
1308:        0,
1309: /*104*/0,
1310:        0,
1311:        0,
1312:        0,
1313:        0,
1314: /*109*/MatMatSolve_Elemental,
1315:        0,
1316:        0,
1317:        0,
1318:        MatMissingDiagonal_Elemental,
1319: /*114*/0,
1320:        0,
1321:        0,
1322:        0,
1323:        0,
1324: /*119*/0,
1325:        MatHermitianTranspose_Elemental,
1326:        0,
1327:        0,
1328:        0,
1329: /*124*/0,
1330:        0,
1331:        0,
1332:        0,
1333:        0,
1334: /*129*/0,
1335:        0,
1336:        0,
1337:        0,
1338:        0,
1339: /*134*/0,
1340:        0,
1341:        0,
1342:        0,
1343:        0,
1344:        0,
1345: /*140*/0,
1346:        0,
1347:        0,
1348:        0,
1349:        0,
1350: /*145*/0,
1351:        0,
1352:        0
1353: };

1355: /*MC
1356:    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

1358:   Use ./configure --download-elemental to build PETSc with Elemental support

1360:   Use -pc_type lu -pc_factor_mat_solver_type elemental to select this direct solver

1362:    Options Database Keys:
1363: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1364: - -mat_elemental_grid_height - sets the grid height for the 2D cyclic ordering of the internal matrix

1366:   Level: beginner

1368: .seealso: MATDENSE
1369: M*/
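
A sketch of picking the type up from the options database (M and N assumed, and the program name ./app is hypothetical):

    Mat A;

    MatCreate(PETSC_COMM_WORLD,&A);
    MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetFromOptions(A);   /* -mat_type elemental selects MATELEMENTAL */
    MatSetUp(A);

run with, e.g., mpiexec -n 4 ./app -mat_type elemental -mat_elemental_grid_height 2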

1371: PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1372: {
1373:   Mat_Elemental      *a;
1374:   PetscErrorCode     ierr;
1375:   PetscBool          flg,flg1;
1376:   Mat_Elemental_Grid *commgrid;
1377:   MPI_Comm           icomm;
1378:   PetscInt           optv1;

1381:   PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
1382:   A->insertmode = NOT_SET_VALUES;

1384:   PetscNewLog(A,&a);
1385:   A->data = (void*)a;

1387:   /* Set up the elemental matrix */
1388:   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

1390:   /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1391:   if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1392:     MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
1393:   }
1394:   PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1395:   MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1396:   if (!flg) {
1397:     PetscNewLog(A,&commgrid);

1399:     PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
1400:     /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
1401:     PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
1402:     if (flg1) {
1403:       if (El::mpi::Size(cxxcomm) % optv1) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1404:       commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1405:     } else {
1406:       commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1407:       /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
1408:     }
1409:     commgrid->grid_refct = 1;
1410:     MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

1412:     a->pivoting    = 1;
1413:     PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

1415:     PetscOptionsEnd();
1416:   } else {
1417:     commgrid->grid_refct++;
1418:   }
1419:   PetscCommDestroy(&icomm);
1420:   a->grid        = commgrid->grid;
1421:   a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
1422:   a->roworiented = PETSC_TRUE;

1424:   PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
1425:   PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",MatProductSetFromOptions_Elemental_MPIDense);
1426:   PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
1427:   return(0);
1428: }