Actual source code: mhyp.c

petsc-3.7.3 2016-08-01
  2: /*
  3:     Creates a hypre IJMatrix from a PETSc matrix
  4: */
  5: #include <petscsys.h>
  6: #include <petsc/private/matimpl.h>
  7: #include <petscdmda.h>                /*I "petscdmda.h" I*/
  8: #include <../src/dm/impls/da/hypre/mhyp.h>
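/*
    Illustrative call sequence for the conversion helpers in this file (a minimal sketch, not part
    of the original source): A is an assembled (MPI)AIJ matrix, the prototypes come from mhyp.h, and
    the hypre object is released with HYPRE_IJMatrixDestroy().  The function name is hypothetical.
*/
#if 0
static PetscErrorCode ExampleConvertAIJToHypreIJ(Mat A)
{
  PetscErrorCode ierr;
  HYPRE_IJMatrix ij;

  PetscFunctionBegin;
  ierr = MatHYPRE_IJMatrixCreate(A,&ij);CHKERRQ(ierr); /* create and preallocate the hypre IJ matrix */
  ierr = MatHYPRE_IJMatrixCopy(A,ij);CHKERRQ(ierr);    /* copy column indices and numerical values */
  /* ... hand the underlying ParCSR object to a hypre solver ... */
  PetscStackCallStandard(HYPRE_IJMatrixDestroy,(ij));
  PetscFunctionReturn(0);
}
#endif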

 12: PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o,HYPRE_IJMatrix ij)
 13: {
 15:   PetscInt       i,n_d,n_o;
 16:   const PetscInt *ia_d,*ia_o;
 17:   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
 18:   PetscInt       *nnz_d=NULL,*nnz_o=NULL;

 21:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 22:     MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
 23:     if (done_d) {
 24:       PetscMalloc1(n_d,&nnz_d);
 25:       for (i=0; i<n_d; i++) {
 26:         nnz_d[i] = ia_d[i+1] - ia_d[i];
 27:       }
 28:     }
 29:     MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
 30:   }
 31:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 32:     MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 33:     if (done_o) {
 34:       PetscMalloc1(n_o,&nnz_o);
 35:       for (i=0; i<n_o; i++) {
 36:         nnz_o[i] = ia_o[i+1] - ia_o[i];
 37:       }
 38:     }
 39:     MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 40:   }
 41:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 42:     if (!done_o) { /* only diagonal part */
 43:       PetscMalloc1(n_d,&nnz_o);
 44:       for (i=0; i<n_d; i++) {
 45:         nnz_o[i] = 0;
 46:       }
 47:     }
 48:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
 49:     PetscFree(nnz_d);
 50:     PetscFree(nnz_o);
 51:   }
 52:   return(0);
 53: }

 57: PetscErrorCode MatHYPRE_IJMatrixCreate(Mat A,HYPRE_IJMatrix *ij)
 58: {
 60:   PetscInt       rstart,rend,cstart,cend;

 66:   MatSetUp(A);
 67:   rstart = A->rmap->rstart;
 68:   rend   = A->rmap->rend;
 69:   cstart = A->cmap->rstart;
 70:   cend   = A->cmap->rend;
 71:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
 72:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));
 73:   {
 74:     PetscBool      same;
 75:     Mat            A_d,A_o;
 76:     const PetscInt *colmap;
 77:     PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);
 78:     if (same) {
 79:       MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
 80:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,*ij);
 81:       return(0);
 82:     }
 83:     PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
 84:     if (same) {
 85:       MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
 86:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,*ij);
 87:       return(0);
 88:     }
 89:     PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);
 90:     if (same) {
 91:       MatHYPRE_IJMatrixPreallocate(A,NULL,*ij);
 92:       return(0);
 93:     }
 94:     PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
 95:     if (same) {
 96:       MatHYPRE_IJMatrixPreallocate(A,NULL,*ij);
 97:       return(0);
 98:     }
 99:   }
100:   return(0);
101: }

103: extern PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
104: extern PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
105: /*
106:     Copies the data (column indices and numerical values) over to the hypre matrix
107: */

111: PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A,HYPRE_IJMatrix ij)
112: {
113:   PetscErrorCode    ierr;
114:   PetscInt          i,rstart,rend,ncols,nr,nc;
115:   const PetscScalar *values;
116:   const PetscInt    *cols;
117:   PetscBool         flg;

120:   PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
121:   MatGetSize(A,&nr,&nc);
122:   if (flg && nr == nc) {
123:     MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
124:     return(0);
125:   }
126:   PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
127:   if (flg) {
128:     MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
129:     return(0);
130:   }

132:   PetscLogEventBegin(MAT_Convert,A,0,0,0);
133:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
134:   MatGetOwnershipRange(A,&rstart,&rend);
135:   for (i=rstart; i<rend; i++) {
136:     MatGetRow(A,i,&ncols,&cols,&values);
137:     PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
138:     MatRestoreRow(A,i,&ncols,&cols,&values);
139:   }
140:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(ij));
141:   PetscLogEventEnd(MAT_Convert,A,0,0,0);
142:   return(0);
143: }

145: /*
146:     This copies the CSR format directly from the PETSc data structure to the hypre
147:     data structure, without calls to MatGetRow() or hypre's SetValues() routines.

149: */
150: #include <_hypre_IJ_mv.h>
151: #include <HYPRE_IJ_mv.h>
152: #include <../src/mat/impls/aij/mpi/mpiaij.h>

156: PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A,HYPRE_IJMatrix ij)
157: {
159:   Mat_SeqAIJ     *pdiag = (Mat_SeqAIJ*)A->data;

161:   hypre_ParCSRMatrix    *par_matrix;
162:   hypre_AuxParCSRMatrix *aux_matrix;
163:   hypre_CSRMatrix       *hdiag;


170:   PetscLogEventBegin(MAT_Convert,A,0,0,0);
171:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
172:   par_matrix = (hypre_ParCSRMatrix*)hypre_IJMatrixObject(ij);
173:   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
174:   hdiag      = hypre_ParCSRMatrixDiag(par_matrix);

176:   /*
177:        this is the hack part where we monkey directly with the hypre data structures
178:   */
179:   PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
180:   PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
181:   PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));

183:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
184:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(ij));
185:   PetscLogEventEnd(MAT_Convert,A,0,0,0);
186:   return(0);
187: }

191: PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A,HYPRE_IJMatrix ij)
192: {
194:   Mat_MPIAIJ     *pA = (Mat_MPIAIJ*)A->data;
195:   Mat_SeqAIJ     *pdiag,*poffd;
196:   PetscInt       i,*garray = pA->garray,*jj,cstart,*pjj;

198:   hypre_ParCSRMatrix    *par_matrix;
199:   hypre_AuxParCSRMatrix *aux_matrix;
200:   hypre_CSRMatrix       *hdiag,*hoffd;

206:   pdiag = (Mat_SeqAIJ*) pA->A->data;
207:   poffd = (Mat_SeqAIJ*) pA->B->data;
208:   /* cstart is only valid for square MPIAIJ laid out in the usual way */
209:   MatGetOwnershipRange(A,&cstart,NULL);

211:   PetscLogEventBegin(MAT_Convert,A,0,0,0);
212:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
213:   par_matrix = (hypre_ParCSRMatrix*)hypre_IJMatrixObject(ij);
214:   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
215:   hdiag      = hypre_ParCSRMatrixDiag(par_matrix);
216:   hoffd      = hypre_ParCSRMatrixOffd(par_matrix);

218:   /*
219:        this is the hack part where we monkey directly with the hypre data structures
220:   */
221:   PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
222:   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre expects this */
223:   jj  = (PetscInt*)hdiag->j;
224:   pjj = (PetscInt*)pdiag->j;
225:   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
226:   PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
227:   PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
228:   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre expects this.
229:      If we hacked hypre a bit more we might be able to avoid this step */
230:   jj  = (PetscInt*) hoffd->j;
231:   pjj = (PetscInt*) poffd->j;
232:   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
233:   PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));

235:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
236:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(ij));
237:   PetscLogEventEnd(MAT_Convert,A,0,0,0);
238:   return(0);
239: }

241: /*
242:     Does NOT copy the data over; instead it DIRECTLY uses the pointers from the PETSc MPIAIJ format

244:     This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first,
245:     which would corrupt the PETSc data structure. A workaround for this problem is needed.
246: */
247: #include <_hypre_IJ_mv.h>
248: #include <HYPRE_IJ_mv.h>

252: PetscErrorCode MatHYPRE_IJMatrixLink(Mat A,HYPRE_IJMatrix *ij)
253: {
254:   PetscErrorCode        ierr;
255:   PetscInt              rstart,rend,cstart,cend;
256:   PetscBool             flg;
257:   hypre_AuxParCSRMatrix *aux_matrix;

263:   PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
264:   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
265:   MatSetUp(A);

267:   PetscLogEventBegin(MAT_Convert,A,0,0,0);
268:   rstart = A->rmap->rstart;
269:   rend   = A->rmap->rend;
270:   cstart = A->cmap->rstart;
271:   cend   = A->cmap->rend;
272:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
273:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

275:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
276:   PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

278:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

280:   /* this is the hack part where we monkey directly with the hypre data structures */

282:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
283:   PetscLogEventEnd(MAT_Convert,A,0,0,0);
284:   return(0);
285: }

287: /* -----------------------------------------------------------------------------------------------------------------*/

289: /*MC
290:    MATHYPRESTRUCT - MATHYPRESTRUCT = "hyprestruct" - A matrix type to be used for parallel sparse matrices
291:           based on the hypre HYPRE_StructMatrix.

293:    Level: intermediate

295:    Notes: Unlike the more general support for blocks in hypre, this allows only one block per process and requires that
296:           the block be defined by a DMDA.

298:           The matrix needs a DMDA associated with it, either by a call to MatSetupDM() or by obtaining the matrix from DMCreateMatrix()

300: .seealso: MatCreate(), PCPFMG, MatSetupDM(), DMCreateMatrix()
301: M*/
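/*
    Minimal usage sketch (illustrative, not part of the original source): obtain a MATHYPRESTRUCT
    matrix from a 3d DMDA with one dof per point and a star stencil, which is the only configuration
    the routines below support.  The function name is hypothetical.
*/
#if 0
static PetscErrorCode ExampleCreateHypreStructMatrix(MPI_Comm comm,PetscInt nx,PetscInt ny,PetscInt nz,Mat *A)
{
  PetscErrorCode ierr;
  DM             da;

  PetscFunctionBegin;
  ierr = DMDACreate3d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                      nx,ny,nz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,NULL,&da);CHKERRQ(ierr);
  ierr = DMSetMatType(da,MATHYPRESTRUCT);CHKERRQ(ierr);
  ierr = DMCreateMatrix(da,A);CHKERRQ(ierr);   /* attaches the DMDA, as described in the Notes above */
  /* assemble with MatSetValuesStencil()/MatSetValuesLocal(); suitable for use with -pc_type pfmg */
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif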


306: PetscErrorCode  MatSetValuesLocal_HYPREStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
307: {
308:   PetscErrorCode    ierr;
309:   PetscInt          i,j,stencil,index[3],row,entries[7];
310:   const PetscScalar *values = y;
311:   Mat_HYPREStruct   *ex     = (Mat_HYPREStruct*) mat->data;

314:   if (PetscUnlikely(ncol >= 7)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ncol %D >= 7 too large",ncol);
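       /* map the offset between the local (ghosted) column and row indices onto the 7-point stencil
          entry numbering defined in MatSetupDM_HYPREStruct():
              -gnx*gny -> 0,  -gnx -> 1,  -1 -> 2,  0 -> 3,  +1 -> 4,  +gnx -> 5,  +gnx*gny -> 6 */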
315:   for (i=0; i<nrow; i++) {
316:     for (j=0; j<ncol; j++) {
317:       stencil = icol[j] - irow[i];
318:       if (!stencil) {
319:         entries[j] = 3;
320:       } else if (stencil == -1) {
321:         entries[j] = 2;
322:       } else if (stencil == 1) {
323:         entries[j] = 4;
324:       } else if (stencil == -ex->gnx) {
325:         entries[j] = 1;
326:       } else if (stencil == ex->gnx) {
327:         entries[j] = 5;
328:       } else if (stencil == -ex->gnxgny) {
329:         entries[j] = 0;
330:       } else if (stencil == ex->gnxgny) {
331:         entries[j] = 6;
332:       } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
333:     }
334:     row      = ex->gindices[irow[i]] - ex->rstart;
335:     index[0] = ex->xs + (row % ex->nx);
336:     index[1] = ex->ys + ((row/ex->nx) % ex->ny);
337:     index[2] = ex->zs + (row/(ex->nxny));
338:     if (addv == ADD_VALUES) {
339:       PetscStackCallStandard(HYPRE_StructMatrixAddToValues,(ex->hmat,(HYPRE_Int *)index,ncol,(HYPRE_Int *)entries,(PetscScalar*)values));
340:     } else {
341:       PetscStackCallStandard(HYPRE_StructMatrixSetValues,(ex->hmat,(HYPRE_Int *)index,ncol,(HYPRE_Int *)entries,(PetscScalar*)values));
342:     }
343:     values += ncol;
344:   }
345:   return(0);
346: }

350: PetscErrorCode  MatZeroRowsLocal_HYPREStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscScalar d,Vec x,Vec b)
351: {
352:   PetscErrorCode  ierr;
353:   PetscInt        i,index[3],row,entries[7] = {0,1,2,3,4,5,6};
354:   PetscScalar     values[7];
355:   Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;

358:   if (x && b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support");
359:   PetscMemzero(values,7*sizeof(PetscScalar));
360:   values[3] = d;
361:   for (i=0; i<nrow; i++) {
362:     row      = ex->gindices[irow[i]] - ex->rstart;
363:     index[0] = ex->xs + (row % ex->nx);
364:     index[1] = ex->ys + ((row/ex->nx) % ex->ny);
365:     index[2] = ex->zs + (row/(ex->nxny));
366:     PetscStackCallStandard(HYPRE_StructMatrixSetValues,(ex->hmat,(HYPRE_Int *)index,7,(HYPRE_Int *)entries,values));
367:   }
368:   PetscStackCallStandard(HYPRE_StructMatrixAssemble,(ex->hmat));
369:   return(0);
370: }

374: PetscErrorCode MatZeroEntries_HYPREStruct_3d(Mat mat)
375: {
376:   PetscErrorCode  ierr;
377:   PetscInt        indices[7] = {0,1,2,3,4,5,6};
378:   Mat_HYPREStruct *ex        = (Mat_HYPREStruct*) mat->data;

381:   /* hypre has no public interface to do this */
382:   PetscStackCallStandard(hypre_StructMatrixClearBoxValues,(ex->hmat,&ex->hbox,7,(HYPRE_Int *)indices,0,1));
383:   PetscStackCallStandard(HYPRE_StructMatrixAssemble,(ex->hmat));
384:   return(0);
385: }

389: static PetscErrorCode  MatSetupDM_HYPREStruct(Mat mat,DM da)
390: {
391:   PetscErrorCode         ierr;
392:   Mat_HYPREStruct        *ex = (Mat_HYPREStruct*) mat->data;
393:   PetscInt               dim,dof,sw[3],nx,ny,nz,ilower[3],iupper[3],ssize,i;
394:   DMBoundaryType         px,py,pz;
395:   DMDAStencilType        st;
396:   ISLocalToGlobalMapping ltog;

399:   ex->da = da;
400:   PetscObjectReference((PetscObject)da);

402:   DMDAGetInfo(ex->da,&dim,0,0,0,0,0,0,&dof,&sw[0],&px,&py,&pz,&st);
403:   DMDAGetCorners(ex->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
404:   iupper[0] += ilower[0] - 1;
405:   iupper[1] += ilower[1] - 1;
406:   iupper[2] += ilower[2] - 1;

408:   /* the hypre_Box is used to zero out the matrix entries in MatZeroEntries() */
409:   ex->hbox.imin[0] = ilower[0];
410:   ex->hbox.imin[1] = ilower[1];
411:   ex->hbox.imin[2] = ilower[2];
412:   ex->hbox.imax[0] = iupper[0];
413:   ex->hbox.imax[1] = iupper[1];
414:   ex->hbox.imax[2] = iupper[2];

416:   /* create the hypre grid object and set its information */
417:   if (dof > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Currently only support for scalar problems");
418:   if (px || py || pz) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add periodic support by calling HYPRE_StructGridSetPeriodic()");
419:   PetscStackCallStandard(HYPRE_StructGridCreate,(ex->hcomm,dim,&ex->hgrid));
420:   PetscStackCallStandard(HYPRE_StructGridSetExtents,(ex->hgrid,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper));
421:   PetscStackCallStandard(HYPRE_StructGridAssemble,(ex->hgrid));

423:   sw[1] = sw[0];
424:   sw[2] = sw[1];
425:   PetscStackCallStandard(HYPRE_StructGridSetNumGhost,(ex->hgrid,(HYPRE_Int *)sw));

427:   /* create the hypre stencil object and set its information */
428:   if (sw[0] > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for wider stencils");
429:   if (st == DMDA_STENCIL_BOX) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for box stencils");
430:   if (dim == 1) {
431:     PetscInt offsets[3][1] = {{-1},{0},{1}};
432:     ssize = 3;
433:     PetscStackCallStandard(HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
434:     for (i=0; i<ssize; i++) {
435:       PetscStackCallStandard(HYPRE_StructStencilSetElement,(ex->hstencil,i,(HYPRE_Int *)offsets[i]));
436:     }
437:   } else if (dim == 2) {
438:     PetscInt offsets[5][2] = {{0,-1},{-1,0},{0,0},{1,0},{0,1}};
439:     ssize = 5;
440:     PetscStackCallStandard(HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
441:     for (i=0; i<ssize; i++) {
442:       PetscStackCallStandard(HYPRE_StructStencilSetElement,(ex->hstencil,i,(HYPRE_Int *)offsets[i]));
443:     }
444:   } else if (dim == 3) {
445:     PetscInt offsets[7][3] = {{0,0,-1},{0,-1,0},{-1,0,0},{0,0,0},{1,0,0},{0,1,0},{0,0,1}};
446:     ssize = 7;
447:     PetscStackCallStandard(HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
448:     for (i=0; i<ssize; i++) {
449:       PetscStackCallStandard(HYPRE_StructStencilSetElement,(ex->hstencil,i,(HYPRE_Int *)offsets[i]));
450:     }
451:   }

453:   /* create the HYPRE vector for rhs and solution */
454:   PetscStackCallStandard(HYPRE_StructVectorCreate,(ex->hcomm,ex->hgrid,&ex->hb));
455:   PetscStackCallStandard(HYPRE_StructVectorCreate,(ex->hcomm,ex->hgrid,&ex->hx));
456:   PetscStackCallStandard(HYPRE_StructVectorInitialize,(ex->hb));
457:   PetscStackCallStandard(HYPRE_StructVectorInitialize,(ex->hx));
458:   PetscStackCallStandard(HYPRE_StructVectorAssemble,(ex->hb));
459:   PetscStackCallStandard(HYPRE_StructVectorAssemble,(ex->hx));

461:   /* create the hypre matrix object and set its information */
462:   PetscStackCallStandard(HYPRE_StructMatrixCreate,(ex->hcomm,ex->hgrid,ex->hstencil,&ex->hmat));
463:   PetscStackCallStandard(HYPRE_StructGridDestroy,(ex->hgrid));
464:   PetscStackCallStandard(HYPRE_StructStencilDestroy,(ex->hstencil));
465:   if (ex->needsinitialization) {
466:     PetscStackCallStandard(HYPRE_StructMatrixInitialize,(ex->hmat));
467:     ex->needsinitialization = PETSC_FALSE;
468:   }

470:   /* set the global and local sizes of the matrix */
471:   DMDAGetCorners(da,0,0,0,&nx,&ny,&nz);
472:   MatSetSizes(mat,dof*nx*ny*nz,dof*nx*ny*nz,PETSC_DECIDE,PETSC_DECIDE);
473:   PetscLayoutSetBlockSize(mat->rmap,1);
474:   PetscLayoutSetBlockSize(mat->cmap,1);
475:   PetscLayoutSetUp(mat->rmap);
476:   PetscLayoutSetUp(mat->cmap);

478:   if (dim == 3) {
479:     mat->ops->setvalueslocal = MatSetValuesLocal_HYPREStruct_3d;
480:     mat->ops->zerorowslocal  = MatZeroRowsLocal_HYPREStruct_3d;
481:     mat->ops->zeroentries    = MatZeroEntries_HYPREStruct_3d;

483:     MatZeroEntries_HYPREStruct_3d(mat);
484:   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only support for 3d DMDA currently");

486:   /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
487:   MatGetOwnershipRange(mat,&ex->rstart,NULL);
488:   DMGetLocalToGlobalMapping(ex->da,&ltog);
489:   ISLocalToGlobalMappingGetIndices(ltog, (const PetscInt **) &ex->gindices);
490:   DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,0);
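       /* gnxgny currently holds gny (returned by DMDAGetGhostCorners() above); the next line turns it
          into gnx*gny, the z-stride of the ghosted grid used by MatSetValuesLocal_HYPREStruct_3d() */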
491:   ex->gnxgny *= ex->gnx;
492:   DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,0);
493:   ex->nxny    = ex->nx*ex->ny;
494:   return(0);
495: }

499: PetscErrorCode MatMult_HYPREStruct(Mat A,Vec x,Vec y)
500: {
501:   PetscErrorCode  ierr;
502:   PetscScalar     *xx,*yy;
503:   PetscInt        ilower[3],iupper[3];
504:   Mat_HYPREStruct *mx = (Mat_HYPREStruct*)(A->data);

507:   DMDAGetCorners(mx->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);

509:   iupper[0] += ilower[0] - 1;
510:   iupper[1] += ilower[1] - 1;
511:   iupper[2] += ilower[2] - 1;

513:   /* copy x values over to hypre */
514:   PetscStackCallStandard(HYPRE_StructVectorSetConstantValues,(mx->hb,0.0));
515:   VecGetArray(x,&xx);
516:   PetscStackCallStandard(HYPRE_StructVectorSetBoxValues,(mx->hb,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,xx));
517:   VecRestoreArray(x,&xx);
518:   PetscStackCallStandard(HYPRE_StructVectorAssemble,(mx->hb));
519:   PetscStackCallStandard(HYPRE_StructMatrixMatvec,(1.0,mx->hmat,mx->hb,0.0,mx->hx));

521:   /* copy solution values back to PETSc */
522:   VecGetArray(y,&yy);
523:   PetscStackCallStandard(HYPRE_StructVectorGetBoxValues,(mx->hx,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,yy));
524:   VecRestoreArray(y,&yy);
525:   return(0);
526: }

530: PetscErrorCode MatAssemblyEnd_HYPREStruct(Mat mat,MatAssemblyType mode)
531: {
532:   Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
533:   PetscErrorCode  ierr;

536:   PetscStackCallStandard(HYPRE_StructMatrixAssemble,(ex->hmat));
537:   /* PetscStackCallStandard(HYPRE_StructMatrixPrint,("dummy",ex->hmat,0)); */
538:   return(0);
539: }

543: PetscErrorCode MatZeroEntries_HYPREStruct(Mat mat)
544: {
546:   /* before the DMDA is attached to the matrix, zeroing entries does not need to do anything */
547:   return(0);
548: }


553: PetscErrorCode MatDestroy_HYPREStruct(Mat mat)
554: {
555:   Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
556:   PetscErrorCode  ierr;

559:   PetscStackCallStandard(HYPRE_StructMatrixDestroy,(ex->hmat));
560:   PetscStackCallStandard(HYPRE_StructVectorDestroy,(ex->hx));
561:   PetscStackCallStandard(HYPRE_StructVectorDestroy,(ex->hb));
562:   return(0);
563: }


568: PETSC_EXTERN PetscErrorCode MatCreate_HYPREStruct(Mat B)
569: {
570:   Mat_HYPREStruct *ex;
571:   PetscErrorCode  ierr;

574:   PetscNewLog(B,&ex);
575:   B->data      = (void*)ex;
576:   B->rmap->bs  = 1;
577:   B->assembled = PETSC_FALSE;

579:   B->insertmode = NOT_SET_VALUES;

581:   B->ops->assemblyend = MatAssemblyEnd_HYPREStruct;
582:   B->ops->mult        = MatMult_HYPREStruct;
583:   B->ops->zeroentries = MatZeroEntries_HYPREStruct;
584:   B->ops->destroy     = MatDestroy_HYPREStruct;

586:   ex->needsinitialization = PETSC_TRUE;

588:   MPI_Comm_dup(PetscObjectComm((PetscObject)B),&(ex->hcomm));
589:   PetscObjectComposeFunction((PetscObject)B,"MatSetupDM_C",MatSetupDM_HYPREStruct);
590:   PetscObjectChangeTypeName((PetscObject)B,MATHYPRESTRUCT);
591:   return(0);
592: }

594: /*MC
595:    MATHYPRESSTRUCT - MATHYPRESSTRUCT = "hypresstruct" - A matrix type to be used for parallel sparse matrices
596:           based on the hypre HYPRE_SStructMatrix.


599:    Level: intermediate

601:    Notes: Unlike hypre's general semi-struct object, which consists of a collection of structured-grid objects and unstructured-
602:           grid objects, here the semi-struct objects are restricted to structured-grid components only.

604:           Unlike the more general support for parts and blocks in hypre, this allows only one part and one block per process, and requires that
605:           the block be defined by a DMDA.

607:           The matrix needs a DMDA associated with it, either by a call to MatSetupDM() or by obtaining the matrix from DMCreateMatrix()

609: M*/
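/*
    Minimal usage sketch (illustrative, not part of the original source): a MATHYPRESSTRUCT matrix
    from a 3d DMDA with several cell-centered dofs per grid point.  The function name is hypothetical.
*/
#if 0
static PetscErrorCode ExampleCreateHypreSStructMatrix(MPI_Comm comm,PetscInt nx,PetscInt ny,PetscInt nz,PetscInt dof,Mat *A)
{
  PetscErrorCode ierr;
  DM             da;

  PetscFunctionBegin;
  ierr = DMDACreate3d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                      nx,ny,nz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,1,NULL,NULL,NULL,&da);CHKERRQ(ierr);
  ierr = DMSetMatType(da,MATHYPRESSTRUCT);CHKERRQ(ierr);
  ierr = DMCreateMatrix(da,A);CHKERRQ(ierr);   /* attaches the DMDA, as described in the Notes above */
  /* assemble with MatSetValuesLocal(); suitable for use with -pc_type syspfmg */
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif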

613: PetscErrorCode  MatSetValuesLocal_HYPRESStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
614: {
615:   PetscErrorCode    ierr;
616:   PetscInt          i,j,stencil,index[3];
617:   const PetscScalar *values = y;
618:   Mat_HYPRESStruct  *ex     = (Mat_HYPRESStruct*) mat->data;

620:   PetscInt part = 0;          /* the PETSc sstruct interface only allows one part */
621:   PetscInt ordering;
622:   PetscInt grid_rank, to_grid_rank;
623:   PetscInt var_type, to_var_type;
624:   PetscInt to_var_entry = 0;

626:   PetscInt nvars= ex->nvars;
627:   PetscInt row,*entries;

630:   PetscMalloc1(7*nvars,&entries);
631:   ordering= ex->dofs_order;  /* ordering = 0: nodal ordering,
632:                                 ordering = 1: variable ordering */
633:   /* stencil entries are ordered by variables: var0_stencil0, var0_stencil1, ..., var0_stencil6, var1_stencil0, var1_stencil1, ...  */
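       /* nodal ordering:    local index = grid_point*nvars + variable
          variable ordering: local index = variable*(gnx*gny*gnz) + grid_point */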

635:   if (!ordering) {
636:     for (i=0; i<nrow; i++) {
637:       grid_rank= irow[i]/nvars;
638:       var_type = (irow[i] % nvars);

640:       for (j=0; j<ncol; j++) {
641:         to_grid_rank= icol[j]/nvars;
642:         to_var_type = (icol[j] % nvars);

644:         to_var_entry= to_var_entry*7;
645:         entries[j]  = to_var_entry;

647:         stencil = to_grid_rank-grid_rank;
648:         if (!stencil) {
649:           entries[j] += 3;
650:         } else if (stencil == -1) {
651:           entries[j] += 2;
652:         } else if (stencil == 1) {
653:           entries[j] += 4;
654:         } else if (stencil == -ex->gnx) {
655:           entries[j] += 1;
656:         } else if (stencil == ex->gnx) {
657:           entries[j] += 5;
658:         } else if (stencil == -ex->gnxgny) {
659:           entries[j] += 0;
660:         } else if (stencil == ex->gnxgny) {
661:           entries[j] += 6;
662:         } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
663:       }

665:       row      = ex->gindices[grid_rank] - ex->rstart;
666:       index[0] = ex->xs + (row % ex->nx);
667:       index[1] = ex->ys + ((row/ex->nx) % ex->ny);
668:       index[2] = ex->zs + (row/(ex->nxny));

670:       if (addv == ADD_VALUES) {
671:         PetscStackCallStandard(HYPRE_SStructMatrixAddToValues,(ex->ss_mat,part,(HYPRE_Int *)index,var_type,ncol,(HYPRE_Int *)entries,(PetscScalar*)values));
672:       } else {
673:         PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,(HYPRE_Int *)index,var_type,ncol,(HYPRE_Int *)entries,(PetscScalar*)values));
674:       }
675:       values += ncol;
676:     }
677:   } else {
678:     for (i=0; i<nrow; i++) {
679:       var_type = irow[i]/(ex->gnxgnygnz);
680:       grid_rank= irow[i] - var_type*(ex->gnxgnygnz);

682:       for (j=0; j<ncol; j++) {
683:         to_var_type = icol[j]/(ex->gnxgnygnz);
684:         to_grid_rank= icol[j] - to_var_type*(ex->gnxgnygnz);

686:         to_var_entry= to_var_entry*7;
687:         entries[j]  = to_var_entry;

689:         stencil = to_grid_rank-grid_rank;
690:         if (!stencil) {
691:           entries[j] += 3;
692:         } else if (stencil == -1) {
693:           entries[j] += 2;
694:         } else if (stencil == 1) {
695:           entries[j] += 4;
696:         } else if (stencil == -ex->gnx) {
697:           entries[j] += 1;
698:         } else if (stencil == ex->gnx) {
699:           entries[j] += 5;
700:         } else if (stencil == -ex->gnxgny) {
701:           entries[j] += 0;
702:         } else if (stencil == ex->gnxgny) {
703:           entries[j] += 6;
704:         } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
705:       }

707:       row      = ex->gindices[grid_rank] - ex->rstart;
708:       index[0] = ex->xs + (row % ex->nx);
709:       index[1] = ex->ys + ((row/ex->nx) % ex->ny);
710:       index[2] = ex->zs + (row/(ex->nxny));

712:       if (addv == ADD_VALUES) {
713:         PetscStackCallStandard(HYPRE_SStructMatrixAddToValues,(ex->ss_mat,part,(HYPRE_Int *)index,var_type,ncol,(HYPRE_Int *)entries,(PetscScalar*)values));
714:       } else {
715:         PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,(HYPRE_Int *)index,var_type,ncol,(HYPRE_Int *)entries,(PetscScalar*)values));
716:       }
717:       values += ncol;
718:     }
719:   }
720:   PetscFree(entries);
721:   return(0);
722: }

726: PetscErrorCode  MatZeroRowsLocal_HYPRESStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscScalar d,Vec x,Vec b)
727: {
728:   PetscErrorCode   ierr;
729:   PetscInt         i,index[3];
730:   PetscScalar      **values;
731:   Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;

733:   PetscInt part     = 0;      /* the PETSc sstruct interface only allows one part */
734:   PetscInt ordering = ex->dofs_order;
735:   PetscInt grid_rank;
736:   PetscInt var_type;
737:   PetscInt nvars= ex->nvars;
738:   PetscInt row,*entries;

741:   if (x && b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support");
742:   PetscMalloc1(7*nvars,&entries);

744:   PetscMalloc1(nvars,&values);
745:   PetscMalloc1(7*nvars*nvars,&values[0]);
746:   for (i=1; i<nvars; i++) {
747:     values[i] = values[i-1] + nvars*7;
748:   }

750:   for (i=0; i< nvars; i++) {
751:     PetscMemzero(values[i],nvars*7*sizeof(PetscScalar));
752:     *(values[i]+3) = d;
753:   }

755:   for (i= 0; i< nvars*7; i++) entries[i] = i;

757:   if (!ordering) {
758:     for (i=0; i<nrow; i++) {
759:       grid_rank = irow[i] / nvars;
760:       var_type  =(irow[i] % nvars);

762:       row      = ex->gindices[grid_rank] - ex->rstart;
763:       index[0] = ex->xs + (row % ex->nx);
764:       index[1] = ex->ys + ((row/ex->nx) % ex->ny);
765:       index[2] = ex->zs + (row/(ex->nxny));
766:       PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,(HYPRE_Int *)index,var_type,7*nvars,(HYPRE_Int *)entries,values[var_type]));
767:     }
768:   } else {
769:     for (i=0; i<nrow; i++) {
770:       var_type = irow[i]/(ex->gnxgnygnz);
771:       grid_rank= irow[i] - var_type*(ex->gnxgnygnz);

773:       row      = ex->gindices[grid_rank] - ex->rstart;
774:       index[0] = ex->xs + (row % ex->nx);
775:       index[1] = ex->ys + ((row/ex->nx) % ex->ny);
776:       index[2] = ex->zs + (row/(ex->nxny));
777:       PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,(HYPRE_Int *)index,var_type,7*nvars,(HYPRE_Int *)entries,values[var_type]));
778:     }
779:   }
780:   PetscStackCallStandard(HYPRE_SStructMatrixAssemble,(ex->ss_mat));
781:   PetscFree(values[0]);
782:   PetscFree(values);
783:   PetscFree(entries);
784:   return(0);
785: }

789: PetscErrorCode MatZeroEntries_HYPRESStruct_3d(Mat mat)
790: {
791:   PetscErrorCode   ierr;
792:   Mat_HYPRESStruct *ex  = (Mat_HYPRESStruct*) mat->data;
793:   PetscInt         nvars= ex->nvars;
794:   PetscInt         size;
795:   PetscInt         part= 0;   /* only one part */

798:   size = ((ex->hbox.imax[0])-(ex->hbox.imin[0])+1)*((ex->hbox.imax[1])-(ex->hbox.imin[1])+1)*((ex->hbox.imax[2])-(ex->hbox.imin[2])+1);
799:   {
800:     PetscInt    i,*entries,iupper[3],ilower[3];
801:     PetscScalar *values;


804:     for (i= 0; i< 3; i++) {
805:       ilower[i]= ex->hbox.imin[i];
806:       iupper[i]= ex->hbox.imax[i];
807:     }

809:     PetscMalloc2(nvars*7,&entries,nvars*7*size,&values);
810:     for (i= 0; i< nvars*7; i++) entries[i]= i;
811:     PetscMemzero(values,nvars*7*size*sizeof(PetscScalar));

813:     for (i= 0; i< nvars; i++) {
814:       PetscStackCallStandard(HYPRE_SStructMatrixSetBoxValues,(ex->ss_mat,part,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,i,nvars*7,(HYPRE_Int *)entries,values));
815:     }
816:     PetscFree2(entries,values);
817:   }
818:   PetscStackCallStandard(HYPRE_SStructMatrixAssemble,(ex->ss_mat));
819:   return(0);
820: }


825: static PetscErrorCode  MatSetupDM_HYPRESStruct(Mat mat,DM da)
826: {
827:   PetscErrorCode         ierr;
828:   Mat_HYPRESStruct       *ex = (Mat_HYPRESStruct*) mat->data;
829:   PetscInt               dim,dof,sw[3],nx,ny,nz;
830:   PetscInt               ilower[3],iupper[3],ssize,i;
831:   DMBoundaryType         px,py,pz;
832:   DMDAStencilType        st;
833:   PetscInt               nparts= 1;  /* assuming only one part */
834:   PetscInt               part  = 0;
835:   ISLocalToGlobalMapping ltog;
837:   ex->da = da;
838:   PetscObjectReference((PetscObject)da);

840:   DMDAGetInfo(ex->da,&dim,0,0,0,0,0,0,&dof,&sw[0],&px,&py,&pz,&st);
841:   DMDAGetCorners(ex->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
842:   iupper[0] += ilower[0] - 1;
843:   iupper[1] += ilower[1] - 1;
844:   iupper[2] += ilower[2] - 1;
845:   /* the hypre_Box is used to zero out the matrix entries in MatZeroEntries() */
846:   ex->hbox.imin[0] = ilower[0];
847:   ex->hbox.imin[1] = ilower[1];
848:   ex->hbox.imin[2] = ilower[2];
849:   ex->hbox.imax[0] = iupper[0];
850:   ex->hbox.imax[1] = iupper[1];
851:   ex->hbox.imax[2] = iupper[2];

853:   ex->dofs_order = 0;

855:   /* assume the same number of dofs at each grid point; also assume all variables are cell-centered */
856:   ex->nvars= dof;

858:   /* create the hypre grid object and set its information */
859:   if (px || py || pz) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add periodic support by calling HYPRE_SStructGridSetPeriodic()");
860:   PetscStackCallStandard(HYPRE_SStructGridCreate,(ex->hcomm,dim,nparts,&ex->ss_grid));

862:   PetscStackCallStandard(HYPRE_SStructGridSetExtents,(ex->ss_grid,part,ex->hbox.imin,ex->hbox.imax));

864:   {
865:     HYPRE_SStructVariable *vartypes;
866:     PetscMalloc1(ex->nvars,&vartypes);
867:     for (i= 0; i< ex->nvars; i++) vartypes[i]= HYPRE_SSTRUCT_VARIABLE_CELL;
868:     PetscStackCallStandard(HYPRE_SStructGridSetVariables,(ex->ss_grid, part, ex->nvars,vartypes));
869:     PetscFree(vartypes);
870:   }
871:   PetscStackCallStandard(HYPRE_SStructGridAssemble,(ex->ss_grid));

873:   sw[1] = sw[0];
874:   sw[2] = sw[1];
875:   /* PetscStackCallStandard(HYPRE_SStructGridSetNumGhost,(ex->ss_grid,sw)); */

877:   /* create the hypre stencil object and set its information */
878:   if (sw[0] > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for wider stencils");
879:   if (st == DMDA_STENCIL_BOX) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for box stencils");

881:   if (dim == 1) {
882:     PetscInt offsets[3][1] = {{-1},{0},{1}};
883:     PetscInt j, cnt;

885:     ssize = 3*(ex->nvars);
886:     PetscStackCallStandard(HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
887:     cnt= 0;
888:     for (i = 0; i < (ex->nvars); i++) {
889:       for (j = 0; j < 3; j++) {
890:         PetscStackCallStandard(HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, (HYPRE_Int *)offsets[j], i));
891:         cnt++;
892:       }
893:     }
894:   } else if (dim == 2) {
895:     PetscInt offsets[5][2] = {{0,-1},{-1,0},{0,0},{1,0},{0,1}};
896:     PetscInt j, cnt;

898:     ssize = 5*(ex->nvars);
899:     PetscStackCallStandard(HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
900:     cnt= 0;
901:     for (i= 0; i< (ex->nvars); i++) {
902:       for (j= 0; j< 5; j++) {
903:         PetscStackCallStandard(HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, (HYPRE_Int *)offsets[j], i));
904:         cnt++;
905:       }
906:     }
907:   } else if (dim == 3) {
908:     PetscInt offsets[7][3] = {{0,0,-1},{0,-1,0},{-1,0,0},{0,0,0},{1,0,0},{0,1,0},{0,0,1}};
909:     PetscInt j, cnt;

911:     ssize = 7*(ex->nvars);
912:     PetscStackCallStandard(HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
913:     cnt= 0;
914:     for (i= 0; i< (ex->nvars); i++) {
915:       for (j= 0; j< 7; j++) {
916:         PetscStackCallStandard(HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, (HYPRE_Int *)offsets[j], i));
917:         cnt++;
918:       }
919:     }
920:   }

922:   /* create the HYPRE graph */
923:   PetscStackCallStandard(HYPRE_SStructGraphCreate,(ex->hcomm, ex->ss_grid, &(ex->ss_graph)));

925:   /* set the stencil graph. Note that each variable has the same graph. This means that each
926:      variable couples to all the other variables with the same stencil pattern. */
927:   for (i= 0; i< (ex->nvars); i++) {
928:     PetscStackCallStandard(HYPRE_SStructGraphSetStencil,(ex->ss_graph,part,i,ex->ss_stencil));
929:   }
930:   PetscStackCallStandard(HYPRE_SStructGraphAssemble,(ex->ss_graph));

932:   /* create the HYPRE sstruct vectors for rhs and solution */
933:   PetscStackCallStandard(HYPRE_SStructVectorCreate,(ex->hcomm,ex->ss_grid,&ex->ss_b));
934:   PetscStackCallStandard(HYPRE_SStructVectorCreate,(ex->hcomm,ex->ss_grid,&ex->ss_x));
935:   PetscStackCallStandard(HYPRE_SStructVectorInitialize,(ex->ss_b));
936:   PetscStackCallStandard(HYPRE_SStructVectorInitialize,(ex->ss_x));
937:   PetscStackCallStandard(HYPRE_SStructVectorAssemble,(ex->ss_b));
938:   PetscStackCallStandard(HYPRE_SStructVectorAssemble,(ex->ss_x));

940:   /* create the hypre matrix object and set its information */
941:   PetscStackCallStandard(HYPRE_SStructMatrixCreate,(ex->hcomm,ex->ss_graph,&ex->ss_mat));
942:   PetscStackCallStandard(HYPRE_SStructGridDestroy,(ex->ss_grid));
943:   PetscStackCallStandard(HYPRE_SStructStencilDestroy,(ex->ss_stencil));
944:   if (ex->needsinitialization) {
945:     PetscStackCallStandard(HYPRE_SStructMatrixInitialize,(ex->ss_mat));
946:     ex->needsinitialization = PETSC_FALSE;
947:   }

949:   /* set the global and local sizes of the matrix */
950:   DMDAGetCorners(da,0,0,0,&nx,&ny,&nz);
951:   MatSetSizes(mat,dof*nx*ny*nz,dof*nx*ny*nz,PETSC_DECIDE,PETSC_DECIDE);
952:   PetscLayoutSetBlockSize(mat->rmap,1);
953:   PetscLayoutSetBlockSize(mat->cmap,1);
954:   PetscLayoutSetUp(mat->rmap);
955:   PetscLayoutSetUp(mat->cmap);

957:   if (dim == 3) {
958:     mat->ops->setvalueslocal = MatSetValuesLocal_HYPRESStruct_3d;
959:     mat->ops->zerorowslocal  = MatZeroRowsLocal_HYPRESStruct_3d;
960:     mat->ops->zeroentries    = MatZeroEntries_HYPRESStruct_3d;

962:     MatZeroEntries_HYPRESStruct_3d(mat);
963:   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only support for 3d DMDA currently");

965:   /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
966:   MatGetOwnershipRange(mat,&ex->rstart,NULL);
967:   DMGetLocalToGlobalMapping(ex->da,&ltog);
968:   ISLocalToGlobalMappingGetIndices(ltog, (const PetscInt **) &ex->gindices);
969:   DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,&ex->gnxgnygnz);

971:   ex->gnxgny    *= ex->gnx;
972:   ex->gnxgnygnz *= ex->gnxgny;

974:   DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,&ex->nz);

976:   ex->nxny   = ex->nx*ex->ny;
977:   ex->nxnynz = ex->nz*ex->nxny;
978:   return(0);
979: }

983: PetscErrorCode MatMult_HYPRESStruct(Mat A,Vec x,Vec y)
984: {
985:   PetscErrorCode   ierr;
986:   PetscScalar      *xx,*yy;
987:   PetscInt         ilower[3],iupper[3];
988:   Mat_HYPRESStruct *mx     = (Mat_HYPRESStruct*)(A->data);
989:   PetscInt         ordering= mx->dofs_order;
990:   PetscInt         nvars   = mx->nvars;
991:   PetscInt         part    = 0;
992:   PetscInt         size;
993:   PetscInt         i;

996:   DMDAGetCorners(mx->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
997:   iupper[0] += ilower[0] - 1;
998:   iupper[1] += ilower[1] - 1;
999:   iupper[2] += ilower[2] - 1;

1001:   size= 1;
1002:   for (i = 0; i < 3; i++) size *= (iupper[i]-ilower[i]+1);

1004:   /* copy x values over to hypre for variable ordering */
1005:   if (ordering) {
1006:     PetscStackCallStandard(HYPRE_SStructVectorSetConstantValues,(mx->ss_b,0.0));
1007:     VecGetArray(x,&xx);
1008:     for (i= 0; i< nvars; i++) {
1009:       PetscStackCallStandard(HYPRE_SStructVectorSetBoxValues,(mx->ss_b,part,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,i,xx+(size*i)));
1010:     }
1011:     VecRestoreArray(x,&xx);
1012:     PetscStackCallStandard(HYPRE_SStructVectorAssemble,(mx->ss_b));
1013:     PetscStackCallStandard(HYPRE_SStructMatrixMatvec,(1.0,mx->ss_mat,mx->ss_b,0.0,mx->ss_x));

1015:     /* copy solution values back to PETSc */
1016:     VecGetArray(y,&yy);
1017:     for (i= 0; i< nvars; i++) {
1018:       PetscStackCallStandard(HYPRE_SStructVectorGetBoxValues,(mx->ss_x,part,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,i,yy+(size*i)));
1019:     }
1020:     VecRestoreArray(y,&yy);
1021:   } else {      /* nodal ordering must be mapped to variable ordering for sys_pfmg */
1022:     PetscScalar *z;
1023:     PetscInt    j, k;

1025:     PetscMalloc1(nvars*size,&z);
1026:     PetscStackCallStandard(HYPRE_SStructVectorSetConstantValues,(mx->ss_b,0.0));
1027:     VecGetArray(x,&xx);

1029:     /* transform nodal to hypre's variable ordering for sys_pfmg */
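       /* i.e. z[j*size+i] = xx[i*nvars+j]: gather component j of every grid point contiguously */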
1030:     for (i= 0; i< size; i++) {
1031:       k= i*nvars;
1032:       for (j= 0; j< nvars; j++) z[j*size+i]= xx[k+j];
1033:     }
1034:     for (i= 0; i< nvars; i++) {
1035:       PetscStackCallStandard(HYPRE_SStructVectorSetBoxValues,(mx->ss_b,part,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,i,z+(size*i)));
1036:     }
1037:     VecRestoreArray(x,&xx);
1038:     PetscStackCallStandard(HYPRE_SStructVectorAssemble,(mx->ss_b));
1039:     PetscStackCallStandard(HYPRE_SStructMatrixMatvec,(1.0,mx->ss_mat,mx->ss_b,0.0,mx->ss_x));

1041:     /* copy solution values back to PETSc */
1042:     VecGetArray(y,&yy);
1043:     for (i= 0; i< nvars; i++) {
1044:       PetscStackCallStandard(HYPRE_SStructVectorGetBoxValues,(mx->ss_x,part,(HYPRE_Int *)ilower,(HYPRE_Int *)iupper,i,z+(size*i)));
1045:     }
1046:     /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
1047:     for (i= 0; i< size; i++) {
1048:       k= i*nvars;
1049:       for (j= 0; j< nvars; j++) yy[k+j]= z[j*size+i];
1050:     }
1051:     VecRestoreArray(y,&yy);
1052:     PetscFree(z);
1053:   }
1054:   return(0);
1055: }

1059: PetscErrorCode MatAssemblyEnd_HYPRESStruct(Mat mat,MatAssemblyType mode)
1060: {
1061:   Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
1062:   PetscErrorCode   ierr;

1065:   PetscStackCallStandard(HYPRE_SStructMatrixAssemble,(ex->ss_mat));
1066:   return(0);
1067: }

1071: PetscErrorCode MatZeroEntries_HYPRESStruct(Mat mat)
1072: {
1074:   /* before the DMDA is attached to the matrix, zeroing entries does not need to do anything */
1075:   return(0);
1076: }


1081: PetscErrorCode MatDestroy_HYPRESStruct(Mat mat)
1082: {
1083:   Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
1084:   PetscErrorCode   ierr;

1087:   PetscStackCallStandard(HYPRE_SStructGraphDestroy,(ex->ss_graph));
1088:   PetscStackCallStandard(HYPRE_SStructMatrixDestroy,(ex->ss_mat));
1089:   PetscStackCallStandard(HYPRE_SStructVectorDestroy,(ex->ss_x));
1090:   PetscStackCallStandard(HYPRE_SStructVectorDestroy,(ex->ss_b));
1091:   return(0);
1092: }

1096: PETSC_EXTERN PetscErrorCode MatCreate_HYPRESStruct(Mat B)
1097: {
1098:   Mat_HYPRESStruct *ex;
1099:   PetscErrorCode   ierr;

1102:   PetscNewLog(B,&ex);
1103:   B->data      = (void*)ex;
1104:   B->rmap->bs  = 1;
1105:   B->assembled = PETSC_FALSE;

1107:   B->insertmode = NOT_SET_VALUES;

1109:   B->ops->assemblyend = MatAssemblyEnd_HYPRESStruct;
1110:   B->ops->mult        = MatMult_HYPRESStruct;
1111:   B->ops->zeroentries = MatZeroEntries_HYPRESStruct;
1112:   B->ops->destroy     = MatDestroy_HYPRESStruct;

1114:   ex->needsinitialization = PETSC_TRUE;

1116:   MPI_Comm_dup(PetscObjectComm((PetscObject)B),&(ex->hcomm));
1117:   PetscObjectComposeFunction((PetscObject)B,"MatSetupDM_C",MatSetupDM_HYPRESStruct);
1118:   PetscObjectChangeTypeName((PetscObject)B,MATHYPRESSTRUCT);
1119:   return(0);
1120: }