Actual source code: matelem.cxx

petsc-3.11.4 2019-09-28
#include <../src/mat/impls/elemental/matelemimpl.h>

/*
    The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
  is attached to a communicator; in this case the attribute is a Mat_Elemental_Grid.
*/
static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
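
/*
  A minimal standalone sketch of this attribute-caching pattern, kept separate from
  the PETSc code below: create the keyval once per process, look for an existing
  payload on the communicator, and attach one if absent. The names Demo_keyval,
  DemoGrid and DemoCacheGridOnComm are illustrative, not part of PETSc.
*/
static int Demo_keyval = MPI_KEYVAL_INVALID;
typedef struct { int grid_refct; } DemoGrid;

static void DemoCacheGridOnComm(MPI_Comm comm)
{
  DemoGrid *g;
  int      found;

  if (Demo_keyval == MPI_KEYVAL_INVALID) {   /* create the keyval only once */
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Demo_keyval,NULL);
  }
  MPI_Comm_get_attr(comm,Demo_keyval,&g,&found);
  if (!found) {                              /* first object on this communicator: attach a payload */
    g = (DemoGrid*)malloc(sizeof(*g));
    g->grid_refct = 1;
    MPI_Comm_set_attr(comm,Demo_keyval,g);
  } else g->grid_refct++;                    /* later objects share it by reference count */
}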

/*@C
   PetscElementalInitializePackage - Initialize the Elemental package

   Logically Collective

   Level: developer

.seealso: MATELEMENTAL, PetscElementalFinalizePackage()
@*/
PetscErrorCode PetscElementalInitializePackage(void)
{
  if (El::Initialized()) return(0);
  El::Initialize(); /* this routine is invoked by the first call to MatCreate_Elemental() */
  PetscRegisterFinalize(PetscElementalFinalizePackage);
  return(0);
}

/*@C
   PetscElementalFinalizePackage - Finalize the Elemental package

   Logically Collective

   Level: developer

.seealso: MATELEMENTAL, PetscElementalInitializePackage()
@*/
PetscErrorCode PetscElementalFinalizePackage(void)
{
  El::Finalize(); /* registered above, so this is invoked by PetscFinalize() */
  return(0);
}

static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscBool      iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      /* call Elemental viewing function */
      PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());
      PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
    } else if (format == PETSC_VIEWER_DEFAULT) {
      PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
      El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
      PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
      if (A->factortype == MAT_FACTOR_NONE) {
        Mat Adense;
        PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
        MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
        MatView(Adense,viewer);
        MatDestroy(&Adense);
      }
    } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Unsupported viewer format");
  } else {
    /* convert to dense format and call MatView() */
    Mat Adense;
    PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");
    MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
    MatView(Adense,viewer);
    MatDestroy(&Adense);
  }
  return(0);
}
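
/*
  A hedged usage sketch (illustrative, not part of matelem.cxx) showing how the
  branches above are reached: pushing PETSC_VIEWER_ASCII_INFO prints the run
  parameters, while the default format prints the cyclic ordering followed by the
  explicit ordering.
*/
static PetscErrorCode DemoViewElemental(Mat A)
{
  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)A));

  PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
  MatView(A,viewer);                 /* takes the PETSC_VIEWER_ASCII_INFO branch */
  PetscViewerPopFormat(viewer);
  MatView(A,viewer);                 /* default format: cyclic + explicit ordering */
  return(0);
}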

static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  info->block_size = 1.0;

  if (flag == MAT_LOCAL) {
    info->nz_allocated = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used      = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    /* not implemented yet; see MatGetInfo_MPIAIJ() for how to reduce the local info->nz_allocated over the communicator */
  } else if (flag == MAT_GLOBAL_SUM) {
    /* no reduction is done yet; report the local allocation and assume Elemental allocates exactly what it uses */
    info->nz_allocated = (double)(*a->emat).AllocatedMemory();
    info->nz_used      = info->nz_allocated;
  }

  info->nz_unneeded       = 0.0;
  info->assemblies        = (double)A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_SYMMETRIC:
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  default:
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
  }
  return(0);
}

static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-process entry: can only be queued for a later ADD */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Second pass: queue the off-process entries, to be communicated by ProcessQueues() */
    a->emat->Reserve(numQueues);
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (!a->emat->IsLocal(erow,ecol)) { /* off-process entry */
          a->emat->QueueUpdate(erow,ecol,vals[i*nc+j]);
        }
      }
    }
  } else { /* column-oriented */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-process entry: can only be queued for a later ADD */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Second pass: queue the off-process entries, to be communicated by ProcessQueues() */
    a->emat->Reserve(numQueues);
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (!a->emat->IsLocal(erow,ecol)) { /* off-process entry */
          a->emat->QueueUpdate(erow,ecol,vals[i+j*nr]);
        }
      }
    }
  }
  return(0);
}
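
/*
  A hedged usage sketch (illustrative, not part of this file): local entries may use
  INSERT_VALUES or ADD_VALUES, but entries owned by another process in the cyclic
  distribution must use ADD_VALUES; they are queued above and communicated when
  MatAssemblyBegin() calls ProcessQueues().
*/
static PetscErrorCode DemoSetValuesElemental(Mat A,PetscInt rstart,PetscInt rend)
{
  PetscInt    i;
  PetscScalar v = 1.0;

  for (i=rstart; i<rend; i++) {
    MatSetValues(A,1,&i,1,&i,&v,ADD_VALUES); /* safe whether (i,i) is local or remote */
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);    /* flushes the queued off-process updates */
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}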

static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* Scoping so that the DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}

static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* Scoping so that the DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}

static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);}
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* Scoping so that the DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}

static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);}
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* Scoping so that the DistMatrix destructors run before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}

static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental    *a = (Mat_Elemental*)A->data;
  Mat_Elemental    *b = (Mat_Elemental*)B->data;
  Mat_Elemental    *c = (Mat_Elemental*)C->data;
  PetscElemScalar  one = 1,zero = 0;

  El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
  C->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
{
  Mat            Ce;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Ce);
  MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  *C = Ce;
  return(0);
}

static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  if (scall == MAT_INITIAL_MATRIX) {
    PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
    MatMatMultSymbolic_Elemental(A,B,1.0,C);
    PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
  }
  PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
  MatMatMultNumeric_Elemental(A,B,*C);
  PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
  return(0);
}

static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  Mat_Elemental      *b = (Mat_Elemental*)B->data;
  Mat_Elemental      *c = (Mat_Elemental*)C->data;
  PetscElemScalar    one = 1,zero = 0;

  El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
  C->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
{
  Mat            Ce;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Ce);
  MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  *C = Ce;
  return(0);
}

static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  if (scall == MAT_INITIAL_MATRIX) {
    PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);
    MatMatTransposeMultSymbolic_Elemental(A,B,1.0,C); /* C = A*B^T has B->rmap->n local columns */
    PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);
  }
  PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);
  MatMatTransposeMultNumeric_Elemental(A,B,*C);
  PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);
  return(0);
}

static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatGetSize(A,&nrows,&ncols);
  nD = nrows>ncols ? ncols : nrows;
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
  }
  VecAssemblyBegin(D);
  VecAssemblyEnd(D);
  return(0);
}

static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
{
  Mat_Elemental         *x = (Mat_Elemental*)X->data;
  const PetscElemScalar *d;
  PetscErrorCode        ierr;

  if (R) {
    VecGetArrayRead(R,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
    El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
    VecRestoreArrayRead(R,(const PetscScalar **)&d);
  }
  if (L) {
    VecGetArrayRead(L,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
    El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
    VecRestoreArrayRead(L,(const PetscScalar **)&d);
  }
  return(0);
}

static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  *missing = PETSC_FALSE; /* a dense Elemental matrix stores every diagonal entry */
  return(0);
}

static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;

  El::Scale((PetscElemScalar)a,*x->emat);
  return(0);
}

/*
  MatAXPY - Computes Y = a*X + Y.
*/
static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;
  Mat_Elemental  *y = (Mat_Elemental*)Y->data;

  El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
  PetscObjectStateIncrease((PetscObject)Y);
  return(0);
}

static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;

  El::Copy(*a->emat,*b->emat);
  PetscObjectStateIncrease((PetscObject)B);
  return(0);
}

static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
{
  Mat            Be;
  MPI_Comm       comm;
  Mat_Elemental  *a=(Mat_Elemental*)A->data;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Be);
  MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Be,MATELEMENTAL);
  MatSetUp(Be);
  *B = Be;
  if (op == MAT_COPY_VALUES) {
    Mat_Elemental *b=(Mat_Elemental*)Be->data;
    El::Copy(*a->emat,*b->emat);
  }
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatConjugate_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  El::Conjugate(*a->emat);
  return(0);
}

static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Adjoint(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscErrorCode    ierr;
  PetscElemScalar   *x;
  PetscInt          pivoting = a->pivoting;

  VecCopy(B,X);
  VecGetArray(X,(PetscScalar **)&x);

  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe); /* copy into Elemental's default [MC,MR] distribution for the solve */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
    } else { /* pivoting == 2 */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  El::Copy(xer,xe); /* copy the solution back into the array attached from X */

  VecRestoreArray(X,(PetscScalar **)&x);
  return(0);
}

static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
{
  PetscErrorCode    ierr;

  MatSolve_Elemental(A,B,X);
  VecAXPY(X,1,Y);
  return(0);
}

static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  Mat_Elemental *x=(Mat_Elemental*)X->data;
  PetscInt      pivoting = a->pivoting;

  El::Copy(*b->emat,*x->emat);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  return(0);
}

static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       pivoting = a->pivoting;

  if (pivoting == 0) {
    El::LU(*a->emat);
  } else if (pivoting == 1) {
    El::LU(*a->emat,*a->P);
  } else {
    El::LU(*a->emat,*a->P,*a->Q);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

static PetscErrorCode MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatLUFactor_Elemental(F,0,0,info);
  return(0);
}

static PetscErrorCode MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_elemental(), so skip this routine. */
  return(0);
}

static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  El::Cholesky(El::UPPER,*a->emat);
  A->factortype = MAT_FACTOR_CHOLESKY;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatCholeskyFactor_Elemental(F,0,info);
  return(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_elemental(), so skip this routine. */
  return(0);
}

PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  *type = MATSOLVERELEMENTAL;
  return(0);
}

static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(B,MATELEMENTAL);
  MatSetUp(B);
  B->factortype = ftype;
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
{
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
  return(0);
}
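
/*
  A hedged sketch (illustrative, not part of this file) of the factorization flow the
  registration above enables: MatGetFactor() builds F, the symbolic stage is a no-op
  here, and the numeric stage copies A into F and factors it in place.
*/
static PetscErrorCode DemoFactorSolveElemental(Mat A,Vec b,Vec x)
{
  Mat           F;
  MatFactorInfo info;

  MatFactorInfoInitialize(&info);
  MatGetFactor(A,MATSOLVERELEMENTAL,MAT_FACTOR_LU,&F);
  MatLUFactorSymbolic(F,A,NULL,NULL,&info); /* skipped: F was already allocated by MatGetFactor() */
  MatLUFactorNumeric(F,A,&info);            /* MatCopy() followed by El::LU() */
  MatSolve(F,b,x);
  MatDestroy(&F);
  return(0);
}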

static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  switch (type) {
  case NORM_1:
    *nrm = El::OneNorm(*a->emat);
    break;
  case NORM_FROBENIUS:
    *nrm = El::FrobeniusNorm(*a->emat);
    break;
  case NORM_INFINITY:
    *nrm = El::InfinityNorm(*a->emat);
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
  }
  return(0);
}

static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  El::Zero(*a->emat);
  return(0);
}

static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,m,shift,stride,*idx;

  if (rows) {
    m = a->emat->LocalHeight();
    shift = a->emat->ColShift();
    stride = a->emat->ColStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
  }
  if (cols) {
    m = a->emat->LocalWidth();
    shift = a->emat->RowShift();
    stride = a->emat->RowStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
  }
  return(0);
}
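
/*
  A hedged usage sketch (illustrative, not part of this file): the index sets built
  above report, in PETSc's global numbering, exactly which rows and columns of the
  cyclic distribution this process stores.
*/
static PetscErrorCode DemoOwnershipElemental(Mat A)
{
  IS rows,cols;

  MatGetOwnershipIS(A,&rows,&cols);       /* dispatches to MatGetOwnershipIS_Elemental() */
  ISView(rows,PETSC_VIEWER_STDOUT_SELF);
  ISView(cols,PETSC_VIEWER_STDOUT_SELF);
  ISDestroy(&rows);
  ISDestroy(&cols);
  return(0);
}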

static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscObjectGetComm((PetscObject)A,&comm);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    MatCreate(comm,&Bmpi);
    MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Bmpi,MATDENSE);
    MatSetUp(Bmpi);
  }

  /* Get local entries of A */
  MatGetOwnershipIS(A,&isrows,&iscols);
  ISGetLocalSize(isrows,&nrows);
  ISGetIndices(isrows,&rows);
  ISGetLocalSize(iscols,&ncols);
  ISGetIndices(iscols,&cols);

  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&Bmpi);
  } else {
    *B = Bmpi;
  }
  ISRestoreIndices(isrows,&rows);
  ISRestoreIndices(iscols,&cols);
  ISDestroy(&isrows);
  ISDestroy(&iscols);
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}
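
/*
  A hedged usage sketch (illustrative, not part of this file): conversions to and
  from MATELEMENTAL go through MatConvert(), which dispatches to the routines in
  this file; note they assemble with ADD_VALUES, as required by the off-process
  path in MatSetValues_Elemental().
*/
static PetscErrorCode DemoConvertElemental(Mat Aaij)
{
  Mat Ae,Aback;

  MatConvert(Aaij,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae); /* AIJ -> Elemental */
  MatConvert(Ae,MATDENSE,MAT_INITIAL_MATRIX,&Aback);    /* Elemental -> dense, see MatConvert_Elemental_Dense() */
  MatDestroy(&Aback);
  MatDestroy(&Ae);
  return(0);
}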

PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    for (j=0; j<ncols; j++) {
      /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
      MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      if (cols[j] == row) continue;
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* The PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      if (cols[j] == row) continue;
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

static PetscErrorCode MatDestroy_Elemental(Mat A)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  PetscErrorCode     ierr;
  Mat_Elemental_Grid *commgrid;
  PetscBool          flg;
  MPI_Comm           icomm;

  delete a->emat;
  delete a->P;
  delete a->Q;

  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (--commgrid->grid_refct == 0) {
    delete commgrid->grid;
    PetscFree(commgrid);
    MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
  }
  PetscCommDestroy(&icomm);
  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscFree(A->data);
  return(0);
}

PetscErrorCode MatSetUp_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  MPI_Comm       comm;
  PetscMPIInt    rsize,csize;
  PetscInt       n;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* Check whether the local row and column sizes are equally distributed.
     Jed: Elemental uses "element" cyclic ordering, so the sizes need to match that
     exactly. The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
     PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
  PetscObjectGetComm((PetscObject)A,&comm);
  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->rmap->N);
  if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);

  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->cmap->N);
  if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);

  a->emat->Resize(A->rmap->N,A->cmap->N);
  El::Zero(*a->emat);

  MPI_Comm_size(A->rmap->comm,&rsize);
  MPI_Comm_size(A->cmap->comm,&csize);
  if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
  a->commsize = rsize;
  a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
  a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
  a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
  a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
  return(0);
}
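
/*
  Worked example of the bookkeeping above, assuming N = A->rmap->N = 10 rows on
  rsize = 4 processes: mr[0] = 10 % 4 = 2 and m[0] = 10/4 + (2 != 4) = 3, so the
  implicit block row distribution is 3,3,2,2. PetscSplitOwnership() produces the
  same 3,3,2,2 local sizes, which is exactly what the equal-distribution check
  above enforces.
*/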

PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  /* Communicate the off-process updates queued by MatSetValues_Elemental() */
  a->emat->ProcessQueues();
  return(0);
}

PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  /* Currently does nothing */
  return(0);
}

PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
{
  Mat            Adense,Ae;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)newMat,&comm);
  MatCreate(comm,&Adense);
  MatSetType(Adense,MATDENSE);
  MatLoad(Adense,viewer);
  MatConvert(Adense,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae);
  MatDestroy(&Adense);
  MatHeaderReplace(newMat,&Ae);
  return(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_Elemental,
       MatMatMultSymbolic_Elemental,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       MatMatTransposeMult_Elemental,
       MatMatTransposeMultSymbolic_Elemental,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ 0,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0
};

/*MC
   MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

  Use ./configure --download-elemental to install PETSc with support for Elemental

  Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver

   Options Database Keys:
+ -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
- -mat_elemental_grid_height - sets the grid height for the 2D cyclic ordering of the internal matrix

  Level: beginner

.seealso: MATDENSE
M*/
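
/*
  Example invocation (hedged; the executable name is illustrative). Any program that
  calls MatSetFromOptions()/KSPSetFromOptions() can select this type and solver at
  run time; with 4 processes the grid-height option below yields a 2x2 process grid:

    mpiexec -n 4 ./app -mat_type elemental -mat_elemental_grid_height 2 \
                       -pc_type lu -pc_factor_mat_solver_type elemental
*/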

PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscErrorCode     ierr;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscElementalInitializePackage();
  PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
  A->insertmode = NOT_SET_VALUES;

  PetscNewLog(A,&a);
  A->data = (void*)a;

  /* Set up the Elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* The grid needs to be shared between multiple Mats on the same communicator, implemented by attribute caching on the MPI_Comm */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
  }
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (!flg) {
    PetscNewLog(A,&commgrid);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
    /* the default grid size displayed here, (CommSize,1), is an arbitrary placeholder until El::Grid() is actually constructed */
    PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
    if (flg1) {
      if (El::mpi::Size(cxxcomm) % optv1 != 0) {
        SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      }
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
    }
    commgrid->grid_refct = 1;
    MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

    a->pivoting = 1;
    PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

    PetscOptionsEnd();
  } else {
    commgrid->grid_refct++;
  }
  PetscCommDestroy(&icomm);
  a->grid        = commgrid->grid;
  a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->roworiented = PETSC_TRUE;

  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
  PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
  return(0);
}
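
/*
  An end-to-end hedged sketch (illustrative, not part of this file) tying the pieces
  together: create a MATELEMENTAL matrix, assemble a diagonal, and apply it. For a
  diagonal of 2s and x = 1, ||b||_2 should come out to 2*sqrt(N).
*/
static PetscErrorCode DemoElementalUsage(MPI_Comm comm,PetscInt N)
{
  Mat         A;
  Vec         x,b;
  PetscInt    i,rstart,rend;
  PetscScalar two = 2.0;
  PetscReal   nrm;

  MatCreate(comm,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,N,N);
  MatSetType(A,MATELEMENTAL);
  MatSetUp(A);                                  /* MatSetUp_Elemental: resize + El::Zero */
  MatGetOwnershipRange(A,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatSetValues(A,1,&i,1,&i,&two,ADD_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  MatCreateVecs(A,&x,&b);
  VecSet(x,1.0);
  MatMult(A,x,b);                               /* MatMult_Elemental: El::Gemv */
  VecNorm(b,NORM_2,&nrm);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  return(0);
}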