Actual source code: mhyp.c
petsc-3.3-p7 2013-05-11
2: /*
3: Creates hypre ijmatrix from PETSc matrix
4: */
5: #include <petscsys.h>
6: #include <petsc-private/matimpl.h> /*I "petscmat.h" I*/
7: #include <petscdmda.h> /*I "petscdmda.h" I*/
8: #include <../src/dm/impls/da/hypre/mhyp.h>
12: PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o,HYPRE_IJMatrix ij)
13: {
15: PetscInt i;
16: PetscInt n_d,*ia_d,n_o,*ia_o;
17: PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE;
18: PetscInt *nnz_d=PETSC_NULL,*nnz_o=PETSC_NULL;
19:
21: if (A_d) { /* determine number of nonzero entries in local diagonal part */
22: MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,PETSC_NULL,&done_d);
23: if (done_d) {
24: PetscMalloc(n_d*sizeof(PetscInt),&nnz_d);
25: for (i=0; i<n_d; i++) {
26: nnz_d[i] = ia_d[i+1] - ia_d[i];
27: }
28: }
29: MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,PETSC_NULL,&done_d);
30: }
31: if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
32: MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,PETSC_NULL,&done_o);
33: if (done_o) {
34: PetscMalloc(n_o*sizeof(PetscInt),&nnz_o);
35: for (i=0; i<n_o; i++) {
36: nnz_o[i] = ia_o[i+1] - ia_o[i];
37: }
38: }
39: MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,PETSC_NULL,&done_o);
40: }
41: if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
42: if (!done_o) { /* only diagonal part */
43: PetscMalloc(n_d*sizeof(PetscInt),&nnz_o);
44: for (i=0; i<n_d; i++) {
45: nnz_o[i] = 0;
46: }
47: }
48: PetscStackCallHypre(0,HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
49: PetscFree(nnz_d);
50: PetscFree(nnz_o);
51: }
52: return(0);
53: }
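/*
   Editor's note (illustration only, not part of the original source): the per-row counts above
   come directly from the CSR row pointers returned by MatGetRowIJ().  For a 3-row diagonal block
   with ia_d = {0, 2, 5, 6} the loop produces nnz_d = {2, 3, 1}, and these counts are what
   HYPRE_IJMatrixSetDiagOffdSizes() uses to preallocate the hypre IJ matrix.
*/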
57: PetscErrorCode MatHYPRE_IJMatrixCreate(Mat A,HYPRE_IJMatrix *ij)
58: {
60: PetscInt rstart,rend,cstart,cend;
61:
66: MatSetUp(A);
67: rstart = A->rmap->rstart;
68: rend = A->rmap->rend;
69: cstart = A->cmap->rstart;
70: cend = A->cmap->rend;
71: PetscStackCallHypre(0,HYPRE_IJMatrixCreate,(((PetscObject)A)->comm,rstart,rend-1,cstart,cend-1,ij));
72: PetscStackCallHypre(0,HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));
73: {
74: PetscBool same;
75: Mat A_d,A_o;
76: PetscInt *colmap;
77: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);
78: if (same) {
79: MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
80: MatHYPRE_IJMatrixPreallocate(A_d,A_o,*ij);
81: return(0);
82: }
83: PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
84: if (same) {
85: MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
86: MatHYPRE_IJMatrixPreallocate(A_d,A_o,*ij);
87: return(0);
88: }
89: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);
90: if (same) {
91: MatHYPRE_IJMatrixPreallocate(A,PETSC_NULL,*ij);
92: return(0);
93: }
94: PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
95: if (same) {
96: MatHYPRE_IJMatrixPreallocate(A,PETSC_NULL,*ij);
97: return(0);
98: }
99: }
100: return(0);
101: }
103: extern PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
104: extern PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
105: /*
106: Copies the data (column indices, numerical values) over to the hypre matrix
107: */
111: PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A,HYPRE_IJMatrix ij)
112: {
113: PetscErrorCode ierr;
114: PetscInt i,rstart,rend,ncols;
115: const PetscScalar *values;
116: const PetscInt *cols;
117: PetscBool flg;
120: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
121: if (flg) {
122: MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
123: return(0);
124: }
125: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
126: if (flg) {
127: MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
128: return(0);
129: }
131: PetscLogEventBegin(MAT_Convert,A,0,0,0);
132: PetscStackCallHypre(0,HYPRE_IJMatrixInitialize,(ij));
133: MatGetOwnershipRange(A,&rstart,&rend);
134: for (i=rstart; i<rend; i++) {
135: MatGetRow(A,i,&ncols,&cols,&values);
136: PetscStackCallHypre(0,HYPRE_IJMatrixSetValues,(ij,1,&ncols,&i,cols,values));
137: MatRestoreRow(A,i,&ncols,&cols,&values);
138: }
139: PetscStackCallHypre(0,HYPRE_IJMatrixAssemble,(ij));
140: PetscLogEventEnd(MAT_Convert,A,0,0,0);
141: return(0);
142: }
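/*
   Editor's note (hedged usage sketch, not part of the original source): a typical driver for the
   two routines above, assuming A is an already assembled AIJ matrix.  Variable names are
   illustrative and error checking is omitted.

       HYPRE_IJMatrix     ij;
       HYPRE_ParCSRMatrix par;

       MatHYPRE_IJMatrixCreate(A,&ij);              create and preallocate the hypre IJ matrix
       MatHYPRE_IJMatrixCopy(A,ij);                 copy indices and values (fast path for AIJ types)
       HYPRE_IJMatrixGetObject(ij,(void**)&par);    underlying ParCSR matrix for hypre solvers
*/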
144: /*
145: This copies the CSR format directly from the PETSc data structure to the hypre
146: data structure without calls to MatGetRow() or hypre's set values.
148: */
149: #include <_hypre_IJ_mv.h>
150: #include <HYPRE_IJ_mv.h>
151: #include <../src/mat/impls/aij/mpi/mpiaij.h>
155: PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A,HYPRE_IJMatrix ij)
156: {
157: PetscErrorCode ierr;
158: Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data;
160: hypre_ParCSRMatrix *par_matrix;
161: hypre_AuxParCSRMatrix *aux_matrix;
162: hypre_CSRMatrix *hdiag;
169: PetscLogEventBegin(MAT_Convert,A,0,0,0);
170: PetscStackCallHypre(0,HYPRE_IJMatrixInitialize,(ij));
171: par_matrix = (hypre_ParCSRMatrix*)hypre_IJMatrixObject(ij);
172: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
173: hdiag = hypre_ParCSRMatrixDiag(par_matrix);
175: /*
176: this is the Hack part where we monkey directly with the hypre data structures
177: */
178: PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
179: PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
180: PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
182: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
183: PetscStackCallHypre(0,HYPRE_IJMatrixAssemble,(ij));
184: PetscLogEventEnd(MAT_Convert,A,0,0,0);
185: return(0);
186: }
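/*
   Editor's note (not part of the original source): the fast copy above works because the PETSc
   SeqAIJ block and the hypre CSR diagonal block use the same CSR layout, so the three
   PetscMemcpy() calls move the row pointers (i), column indices (j) and values (data) in one
   shot instead of calling HYPRE_IJMatrixSetValues() row by row.  Setting NeedAux to 0 signals
   that the values are already in place in the ParCSR object, so assembly does not rebuild them
   from hypre's auxiliary structure.
*/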
190: PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A,HYPRE_IJMatrix ij)
191: {
192: PetscErrorCode ierr;
193: Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data;
194: Mat_SeqAIJ *pdiag,*poffd;
195: PetscInt i,*garray = pA->garray,*jj,cstart,*pjj;
197: hypre_ParCSRMatrix *par_matrix;
198: hypre_AuxParCSRMatrix *aux_matrix;
199: hypre_CSRMatrix *hdiag,*hoffd;
205: pdiag = (Mat_SeqAIJ*) pA->A->data;
206: poffd = (Mat_SeqAIJ*) pA->B->data;
207: /* cstart is only valid for square MPIAIJ laid out in the usual way */
208: MatGetOwnershipRange(A,&cstart,PETSC_NULL);
210: PetscLogEventBegin(MAT_Convert,A,0,0,0);
211: PetscStackCallHypre(0,HYPRE_IJMatrixInitialize,(ij));
212: par_matrix = (hypre_ParCSRMatrix*)hypre_IJMatrixObject(ij);
213: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
214: hdiag = hypre_ParCSRMatrixDiag(par_matrix);
215: hoffd = hypre_ParCSRMatrixOffd(par_matrix);
217: /*
218: this is the Hack part where we monkey directly with the hypre data structures
219: */
220: PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
221: /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
222: jj = hdiag->j;
223: pjj = pdiag->j;
224: for (i=0; i<pdiag->nz; i++) {
225: jj[i] = cstart + pjj[i];
226: }
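/*
   Editor's note (illustrative values, not part of the original source): with cstart = 100
   and local diagonal-block column indices pjj = {0, 3, 7}, the loop above stores
   jj = {100, 103, 107}, i.e. the global column numbering that hypre expects.
*/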
227: PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
228: PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
229: /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
230: If we hacked hypre a bit more we might be able to avoid this step */
231: jj = hoffd->j;
232: pjj = poffd->j;
233: for (i=0; i<poffd->nz; i++) {
234: jj[i] = garray[pjj[i]];
235: }
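/*
   Editor's note (illustrative values, not part of the original source): garray maps the
   compressed off-diagonal column numbering back to global columns; e.g. with
   garray = {5, 42, 98}, a local index pjj[i] = 1 becomes the global column jj[i] = 42.
*/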
236: PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));
238: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
239: PetscStackCallHypre(0,HYPRE_IJMatrixAssemble,(ij));
240: PetscLogEventEnd(MAT_Convert,A,0,0,0);
241: return(0);
242: }
244: /*
245: Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format
247: This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
248: which would corrupt the PETSc data structure if we did this. A workaround to this problem is needed.
249: */
250: #include <_hypre_IJ_mv.h>
251: #include <HYPRE_IJ_mv.h>
255: PetscErrorCode MatHYPRE_IJMatrixLink(Mat A,HYPRE_IJMatrix *ij)
256: {
257: PetscErrorCode ierr;
258: PetscInt rstart,rend,cstart,cend;
259: PetscBool flg;
260: hypre_AuxParCSRMatrix *aux_matrix;
266: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
267: if (!flg) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
268: MatSetUp(A);
270: PetscLogEventBegin(MAT_Convert,A,0,0,0);
271: rstart = A->rmap->rstart;
272: rend = A->rmap->rend;
273: cstart = A->cmap->rstart;
274: cend = A->cmap->rend;
275: PetscStackCallHypre(0,HYPRE_IJMatrixCreate,(((PetscObject)A)->comm,rstart,rend-1,cstart,cend-1,ij));
276: PetscStackCallHypre(0,HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));
277:
278: PetscStackCallHypre(0,HYPRE_IJMatrixInitialize,(*ij));
279: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij);
281: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
283: /* this is the Hack part where we monkey directly with the hypre data structures */
285: PetscStackCallHypre(0,HYPRE_IJMatrixAssemble,(*ij));
286: PetscLogEventEnd(MAT_Convert,A,0,0,0);
287: return(0);
288: }
290: /* -----------------------------------------------------------------------------------------------------------------*/
292: /*MC
293: MATHYPRESTRUCT - MATHYPRESTRUCT = "hyprestruct" - A matrix type to be used for parallel sparse matrices
294: based on the hypre HYPRE_StructMatrix.
296: Level: intermediate
298: Notes: Unlike the more general support for blocks in hypre, this allows only one block per process and requires that the block
299: be defined by a DMDA.
301: The matrix needs a DMDA associated with it, either by a call to MatSetDM() or by obtaining the matrix from DMCreateMatrix().
303: .seealso: MatCreate(), PCPFMG, MatSetDM(), DMCreateMatrix()
304: M*/
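/*
   Editor's note (hedged usage sketch, not part of the original source): one way to obtain a
   MATHYPRESTRUCT matrix in petsc-3.3, assuming PETSc was configured with hypre and da is a
   3d DMDA with one degree of freedom and a star stencil of width 1.  Error checking omitted.

       Mat A;
       DMCreateMatrix(da,MATHYPRESTRUCT,&A);        the DMDA is associated with A for you
       MatSetValuesLocal(A,...);                    fill using local (ghosted) indices
       MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
       MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

   Alternatively one can call MatCreate()/MatSetType(A,MATHYPRESTRUCT) and then MatSetDM(A,da).
*/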
309: PetscErrorCode MatSetValuesLocal_HYPREStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
310: {
311: PetscErrorCode ierr;
312: PetscInt i,j,stencil,index[3],row,entries[7];
313: const PetscScalar *values = y;
314: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
317: for (i=0; i<nrow; i++) {
318: for (j=0; j<ncol; j++) {
319: stencil = icol[j] - irow[i];
320: if (!stencil) {
321: entries[j] = 3;
322: } else if (stencil == -1) {
323: entries[j] = 2;
324: } else if (stencil == 1) {
325: entries[j] = 4;
326: } else if (stencil == -ex->gnx) {
327: entries[j] = 1;
328: } else if (stencil == ex->gnx) {
329: entries[j] = 5;
330: } else if (stencil == -ex->gnxgny) {
331: entries[j] = 0;
332: } else if (stencil == ex->gnxgny) {
333: entries[j] = 6;
334: } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
335: }
336: row = ex->gindices[irow[i]] - ex->rstart;
337: index[0] = ex->xs + (row % ex->nx);
338: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
339: index[2] = ex->zs + (row/(ex->nxny));
340: if (addv == ADD_VALUES) {
341: PetscStackCallHypre(0,HYPRE_StructMatrixAddToValues,(ex->hmat,index,ncol,entries,(PetscScalar*)values));
342: } else {
343: PetscStackCallHypre(0,HYPRE_StructMatrixSetValues,(ex->hmat,index,ncol,entries,(PetscScalar*)values));
344: }
345: values += ncol;
346: }
347: return(0);
348: }
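/*
   Editor's note (not part of the original source): the routine above maps the offset between
   local column and row numbers onto the seven stencil entries created in MatSetDM_HYPREStruct(),

       offset:  -gnx*gny   -gnx   -1    0   +1   +gnx   +gnx*gny
       entry:       0        1     2    3    4     5        6

   and decodes the local row into grid coordinates as
   (xs + row%nx, ys + (row/nx)%ny, zs + row/(nx*ny)).  For example, with nx = ny = 4 a local
   row of 21 lands at (xs+1, ys+1, zs+1).
*/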
352: PetscErrorCode MatZeroRowsLocal_HYPREStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscScalar d,Vec x,Vec b)
353: {
354: PetscErrorCode ierr;
355: PetscInt i,index[3],row,entries[7] = {0,1,2,3,4,5,6};
356: PetscScalar values[7];
357: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
360: if (x && b) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"No support");
361: PetscMemzero(values,7*sizeof(PetscScalar));
362: values[3] = d;
363: for (i=0; i<nrow; i++) {
364: row = ex->gindices[irow[i]] - ex->rstart;
365: index[0] = ex->xs + (row % ex->nx);
366: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
367: index[2] = ex->zs + (row/(ex->nxny));
368: PetscStackCallHypre(0,HYPRE_StructMatrixSetValues,(ex->hmat,index,7,entries,values));
369: }
370: PetscStackCallHypre(0,HYPRE_StructMatrixAssemble,(ex->hmat));
371: return(0);
372: }
376: PetscErrorCode MatZeroEntries_HYPREStruct_3d(Mat mat)
377: {
379: PetscInt indices[7] = {0,1,2,3,4,5,6};
380: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
383: /* hypre has no public interface to do this */
384: PetscStackCallHypre(0,hypre_StructMatrixClearBoxValues,(ex->hmat,&ex->hbox,7,indices,0,1));
385: PetscStackCallHypre(0,HYPRE_StructMatrixAssemble,(ex->hmat));
386: return(0);
387: }
391: PetscErrorCode MatSetDM_HYPREStruct(Mat mat,DM da)
392: {
393: PetscErrorCode ierr;
394: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
395: PetscInt dim,dof,sw[3],nx,ny,nz,ilower[3],iupper[3],ssize,i;
396: DMDABoundaryType px,py,pz;
397: DMDAStencilType st;
400: ex->da = da;
401: PetscObjectReference((PetscObject)da);
403: DMDAGetInfo(ex->da,&dim,0,0,0,0,0,0,&dof,&sw[0],&px,&py,&pz,&st);
404: DMDAGetCorners(ex->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
405: iupper[0] += ilower[0] - 1;
406: iupper[1] += ilower[1] - 1;
407: iupper[2] += ilower[2] - 1;
409: /* the hypre_Box is used to zero out the matrix entries in MatZeroEntries() */
410: ex->hbox.imin[0] = ilower[0];
411: ex->hbox.imin[1] = ilower[1];
412: ex->hbox.imin[2] = ilower[2];
413: ex->hbox.imax[0] = iupper[0];
414: ex->hbox.imax[1] = iupper[1];
415: ex->hbox.imax[2] = iupper[2];
417: /* create the hypre grid object and set its information */
418: if (dof > 1) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Currently only support for scalar problems");
419: if (px || py || pz) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Ask us to add periodic support by calling HYPRE_StructGridSetPeriodic()");
420: PetscStackCallHypre(0,HYPRE_StructGridCreate,(ex->hcomm,dim,&ex->hgrid));
421: PetscStackCallHypre(0,HYPRE_StructGridSetExtents,(ex->hgrid,ilower,iupper));
422: PetscStackCallHypre(0,HYPRE_StructGridAssemble,(ex->hgrid));
423:
424: sw[1] = sw[0];
425: sw[2] = sw[1];
426: PetscStackCallHypre(0,HYPRE_StructGridSetNumGhost,(ex->hgrid,sw));
428: /* create the hypre stencil object and set its information */
429: if (sw[0] > 1) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Ask us to add support for wider stencils");
430: if (st == DMDA_STENCIL_BOX) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Ask us to add support for box stencils");
431: if (dim == 1) {
432: PetscInt offsets[3][1] = {{-1},{0},{1}};
433: ssize = 3;
434: PetscStackCallHypre(0,HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
435: for (i=0; i<ssize; i++) {
436: PetscStackCallHypre(0,HYPRE_StructStencilSetElement,(ex->hstencil,i,offsets[i]));
437: }
438: } else if (dim == 2) {
439: PetscInt offsets[5][2] = {{0,-1},{-1,0},{0,0},{1,0},{0,1}};
440: ssize = 5;
441: PetscStackCallHypre(0,HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
442: for (i=0; i<ssize; i++) {
443: PetscStackCallHypre(0,HYPRE_StructStencilSetElement,(ex->hstencil,i,offsets[i]));
444: }
445: } else if (dim == 3) {
446: PetscInt offsets[7][3] = {{0,0,-1},{0,-1,0},{-1,0,0},{0,0,0},{1,0,0},{0,1,0},{0,0,1}};
447: ssize = 7;
448: PetscStackCallHypre(0,HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
449: for (i=0; i<ssize; i++) {
450: PetscStackCallHypre(0,HYPRE_StructStencilSetElement,(ex->hstencil,i,offsets[i]));
451: }
452: }
453:
454: /* create the HYPRE vector for rhs and solution */
455: PetscStackCallHypre(0,HYPRE_StructVectorCreate,(ex->hcomm,ex->hgrid,&ex->hb));
456: PetscStackCallHypre(0,HYPRE_StructVectorCreate,(ex->hcomm,ex->hgrid,&ex->hx));
457: PetscStackCallHypre(0,HYPRE_StructVectorInitialize,(ex->hb));
458: PetscStackCallHypre(0,HYPRE_StructVectorInitialize,(ex->hx));
459: PetscStackCallHypre(0,HYPRE_StructVectorAssemble,(ex->hb));
460: PetscStackCallHypre(0,HYPRE_StructVectorAssemble,(ex->hx));
462: /* create the hypre matrix object and set its information */
463: PetscStackCallHypre(0,HYPRE_StructMatrixCreate,(ex->hcomm,ex->hgrid,ex->hstencil,&ex->hmat));
464: PetscStackCallHypre(0,HYPRE_StructGridDestroy,(ex->hgrid));
465: PetscStackCallHypre(0,HYPRE_StructStencilDestroy,(ex->hstencil));
466: if (ex->needsinitialization) {
467: PetscStackCallHypre(0,HYPRE_StructMatrixInitialize,(ex->hmat));
468: ex->needsinitialization = PETSC_FALSE;
469: }
471: /* set the global and local sizes of the matrix */
472: DMDAGetCorners(da,0,0,0,&nx,&ny,&nz);
473: MatSetSizes(mat,dof*nx*ny*nz,dof*nx*ny*nz,PETSC_DECIDE,PETSC_DECIDE);
474: PetscLayoutSetBlockSize(mat->rmap,1);
475: PetscLayoutSetBlockSize(mat->cmap,1);
476: PetscLayoutSetUp(mat->rmap);
477: PetscLayoutSetUp(mat->cmap);
479: if (dim == 3) {
480: mat->ops->setvalueslocal = MatSetValuesLocal_HYPREStruct_3d;
481: mat->ops->zerorowslocal = MatZeroRowsLocal_HYPREStruct_3d;
482: mat->ops->zeroentries = MatZeroEntries_HYPREStruct_3d;
483: MatZeroEntries_HYPREStruct_3d(mat);
484: } else SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Only support for 3d DMDA currently");
486: /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
487: MatGetOwnershipRange(mat,&ex->rstart,PETSC_NULL);
488: DMDAGetGlobalIndices(ex->da,PETSC_NULL,&ex->gindices);
489: DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,0);
490: ex->gnxgny *= ex->gnx;
491: DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,0);
492: ex->nxny = ex->nx*ex->ny;
493: return(0);
494: }
498: PetscErrorCode MatMult_HYPREStruct(Mat A,Vec x,Vec y)
499: {
500: PetscErrorCode ierr;
501: PetscScalar *xx,*yy;
502: PetscInt ilower[3],iupper[3];
503: Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(A->data);
506: DMDAGetCorners(mx->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
507: iupper[0] += ilower[0] - 1;
508: iupper[1] += ilower[1] - 1;
509: iupper[2] += ilower[2] - 1;
511: /* copy x values over to hypre */
512: PetscStackCallHypre(0,HYPRE_StructVectorSetConstantValues,(mx->hb,0.0));
513: VecGetArray(x,&xx);
514: PetscStackCallHypre(0,HYPRE_StructVectorSetBoxValues,(mx->hb,ilower,iupper,xx));
515: VecRestoreArray(x,&xx);
516: PetscStackCallHypre(0,HYPRE_StructVectorAssemble,(mx->hb));
517: PetscStackCallHypre(0,HYPRE_StructMatrixMatvec,(1.0,mx->hmat,mx->hb,0.0,mx->hx));
519: /* copy solution values back to PETSc */
520: VecGetArray(y,&yy);
521: PetscStackCallHypre(0,HYPRE_StructVectorGetBoxValues,(mx->hx,ilower,iupper,yy));
522: VecRestoreArray(y,&yy);
523: return(0);
524: }
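/*
   Editor's note (not part of the original source): MatMult above round-trips through hypre:
   the PETSc input vector is copied into the struct vector hb with
   HYPRE_StructVectorSetBoxValues(), the product is formed by HYPRE_StructMatrixMatvec(),
   and the result is read back out of hx with HYPRE_StructVectorGetBoxValues().  From the
   user's side this is simply MatMult(A,x,y) on a MATHYPRESTRUCT matrix.
*/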
528: PetscErrorCode MatAssemblyEnd_HYPREStruct(Mat mat,MatAssemblyType mode)
529: {
530: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
531: PetscErrorCode ierr;
534: PetscStackCallHypre(0,HYPRE_StructMatrixAssemble,(ex->hmat));
535: /* PetscStackCallHypre(0,HYPRE_StructMatrixPrint,("dummy",ex->hmat,0)); */
536: return(0);
537: }
541: PetscErrorCode MatZeroEntries_HYPREStruct(Mat mat)
542: {
544: /* before the DMDA is attached to the matrix, zeroing the entries does not need to do anything */
545: return(0);
546: }
551: PetscErrorCode MatDestroy_HYPREStruct(Mat mat)
552: {
553: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
554: PetscErrorCode ierr;
557: PetscStackCallHypre(0,HYPRE_StructMatrixDestroy,(ex->hmat));
558: PetscStackCallHypre(0,HYPRE_StructVectorDestroy,(ex->hx));
559: PetscStackCallHypre(0,HYPRE_StructVectorDestroy,(ex->hb));
560: return(0);
561: }
564: EXTERN_C_BEGIN
567: PetscErrorCode MatCreate_HYPREStruct(Mat B)
568: {
569: Mat_HYPREStruct *ex;
570: PetscErrorCode ierr;
573: PetscNewLog(B,Mat_HYPREStruct,&ex);
574: B->data = (void*)ex;
575: B->rmap->bs = 1;
576: B->assembled = PETSC_FALSE;
578: B->insertmode = NOT_SET_VALUES;
580: B->ops->assemblyend = MatAssemblyEnd_HYPREStruct;
581: B->ops->mult = MatMult_HYPREStruct;
582: B->ops->zeroentries = MatZeroEntries_HYPREStruct;
583: B->ops->destroy = MatDestroy_HYPREStruct;
585: ex->needsinitialization = PETSC_TRUE;
587: MPI_Comm_dup(((PetscObject)B)->comm,&(ex->hcomm));
588: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSetDM_C","MatSetDM_HYPREStruct",MatSetDM_HYPREStruct);
589: PetscObjectChangeTypeName((PetscObject)B,MATHYPRESTRUCT);
590: return(0);
591: }
592: EXTERN_C_END
595: /*MC
596: MATHYPRESSTRUCT - MATHYPRESSTRUCT = "hypresstruct" - A matrix type to be used for parallel sparse matrices
597: based on the hypre HYPRE_SStructMatrix.
598:
600: Level: intermediate
601:
602: Notes: Unlike hypre's general semi-struct object consisting of a collection of structured-grid objects and unstructured
603: grid objects, we will restrict the semi-struct objects to consist of only structured-grid components.
605: Unlike the more general support for parts and blocks in hypre, this allows only one part and one block per process, and requires that the block
606: be defined by a DMDA.
607:
608: The matrix needs a DMDA associated with it, either by a call to MatSetDM() or by obtaining the matrix from DMCreateMatrix().
609:
610: M*/
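/*
   Editor's note (not part of the original source): MATHYPRESSTRUCT is obtained the same way as
   MATHYPRESTRUCT above (e.g. through DMCreateMatrix() on a 3d DMDA, assuming hypre support is
   configured), but it allows dof > 1.  Two local numberings appear in the routines below:
   "nodal" ordering interleaves the variables at each grid point, while "variable" ordering
   blocks all grid points of one variable together; ex->dofs_order selects between them and is
   set to 0 (nodal) in MatSetDM_HYPRESStruct().
*/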
614: PetscErrorCode MatSetValuesLocal_HYPRESStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
615: {
616: PetscErrorCode ierr;
617: PetscInt i,j,stencil,index[3];
618: const PetscScalar *values = y;
619: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
621: PetscInt part= 0; /* Petsc sstruct interface only allows 1 part */
622: PetscInt ordering;
623: PetscInt grid_rank, to_grid_rank;
624: PetscInt var_type, to_var_type;
625: PetscInt to_var_entry = 0;
627: PetscInt nvars= ex->nvars;
628: PetscInt row,*entries;
631: PetscMalloc(7*nvars*sizeof(PetscInt),&entries);
632: ordering= ex-> dofs_order; /* ordering= 0 nodal ordering
633: 1 variable ordering */
634: /* stencil entries are ordered by variables: var0_stencil0, var0_stencil1, ..., var0_stencil6, var1_stencil0, var1_stencil1, ... */
636: if (!ordering) {
637: for (i=0; i<nrow; i++) {
638: grid_rank= irow[i]/nvars;
639: var_type = (irow[i] % nvars);
641: for (j=0; j<ncol; j++) {
642: to_grid_rank= icol[j]/nvars;
643: to_var_type = (icol[j] % nvars);
645: to_var_entry= to_var_type*7;
646: entries[j]= to_var_entry;
648: stencil = to_grid_rank-grid_rank;
649: if (!stencil) {
650: entries[j] += 3;
651: } else if (stencil == -1) {
652: entries[j] += 2;
653: } else if (stencil == 1) {
654: entries[j] += 4;
655: } else if (stencil == -ex->gnx) {
656: entries[j] += 1;
657: } else if (stencil == ex->gnx) {
658: entries[j] += 5;
659: } else if (stencil == -ex->gnxgny) {
660: entries[j] += 0;
661: } else if (stencil == ex->gnxgny) {
662: entries[j] += 6;
663: } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
664: }
666: row = ex->gindices[grid_rank] - ex->rstart;
667: index[0] = ex->xs + (row % ex->nx);
668: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
669: index[2] = ex->zs + (row/(ex->nxny));
671: if (addv == ADD_VALUES) {
672: PetscStackCallHypre(0,HYPRE_SStructMatrixAddToValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
673: } else {
674: PetscStackCallHypre(0,HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
675: }
676: values += ncol;
677: }
678: } else {
679: for (i=0; i<nrow; i++) {
680: var_type = irow[i]/(ex->gnxgnygnz);
681: grid_rank= irow[i] - var_type*(ex->gnxgnygnz);
683: for (j=0; j<ncol; j++) {
684: to_var_type = icol[j]/(ex->gnxgnygnz);
685: to_grid_rank= icol[j] - to_var_type*(ex->gnxgnygnz);
687: to_var_entry= to_var_type*7;
688: entries[j]= to_var_entry;
690: stencil = to_grid_rank-grid_rank;
691: if (!stencil) {
692: entries[j] += 3;
693: } else if (stencil == -1) {
694: entries[j] += 2;
695: } else if (stencil == 1) {
696: entries[j] += 4;
697: } else if (stencil == -ex->gnx) {
698: entries[j] += 1;
699: } else if (stencil == ex->gnx) {
700: entries[j] += 5;
701: } else if (stencil == -ex->gnxgny) {
702: entries[j] += 0;
703: } else if (stencil == ex->gnxgny) {
704: entries[j] += 6;
705: } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
706: }
708: row = ex->gindices[grid_rank] - ex->rstart;
709: index[0] = ex->xs + (row % ex->nx);
710: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
711: index[2] = ex->zs + (row/(ex->nxny));
713: if (addv == ADD_VALUES) {
714: PetscStackCallHypre(0,HYPRE_SStructMatrixAddToValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
715: } else {
716: PetscStackCallHypre(0,HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
717: }
718: values += ncol;
719: }
720: }
721: PetscFree(entries);
722: return(0);
723: }
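/*
   Editor's note (illustrative values, not part of the original source): with nvars = 2, nodal
   ordering interleaves the unknowns, so irow = 5 decodes to grid_rank = 2, var_type = 1; with
   variable ordering and gnxgnygnz = 64 the same unknown appears as irow = 64 + 2 = 66.  The
   stencil entry index combines the target variable (blocks of 7) with the offset position,
   matching the variable-major order in which HYPRE_SStructStencilSetEntry() is called in
   MatSetDM_HYPRESStruct().
*/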
727: PetscErrorCode MatZeroRowsLocal_HYPRESStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscScalar d,Vec x,Vec b)
728: {
729: PetscErrorCode ierr;
730: PetscInt i,index[3];
731: PetscScalar **values;
732: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
734: PetscInt part= 0; /* Petsc sstruct interface only allows 1 part */
735: PetscInt ordering= ex->dofs_order;
736: PetscInt grid_rank;
737: PetscInt var_type;
738: PetscInt nvars= ex->nvars;
739: PetscInt row,*entries;
742: if (x && b) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"No support");
743: PetscMalloc(7*nvars*sizeof(PetscInt),&entries);
745: PetscMalloc(nvars*sizeof(PetscScalar *),&values);
746: PetscMalloc(7*nvars*nvars*sizeof(PetscScalar),&values[0]);
747: for (i=1; i<nvars; i++) {
748: values[i] = values[i-1] + nvars*7;
749: }
751: for (i=0; i< nvars; i++) {
752: PetscMemzero(values[i],nvars*7*sizeof(PetscScalar));
753: *(values[i]+3)= d;
754: }
756: for (i= 0; i< nvars*7; i++) {
757: entries[i]= i;
758: }
760: if (!ordering) {
761: for (i=0; i<nrow; i++) {
762: grid_rank= irow[i]/nvars;
763: var_type = (irow[i] % nvars);
765: row = ex->gindices[grid_rank] - ex->rstart;
766: index[0] = ex->xs + (row % ex->nx);
767: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
768: index[2] = ex->zs + (row/(ex->nxny));
769: PetscStackCallHypre(0,HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,7*nvars,entries,values[var_type]));
770: }
771: } else {
772: for (i=0; i<nrow; i++) {
773: var_type = irow[i]/(ex->gnxgnygnz);
774: grid_rank= irow[i] - var_type*(ex->gnxgnygnz);
776: row = ex->gindices[grid_rank] - ex->rstart;
777: index[0] = ex->xs + (row % ex->nx);
778: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
779: index[2] = ex->zs + (row/(ex->nxny));
780: PetscStackCallHypre(0,HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,7*nvars,entries,values[var_type]));
781: }
782: }
783: PetscStackCallHypre(0,HYPRE_SStructMatrixAssemble,(ex->ss_mat));
784: PetscFree(values[0]);
785: PetscFree(values);
786: PetscFree(entries);
787: return(0);
788: }
792: PetscErrorCode MatZeroEntries_HYPRESStruct_3d(Mat mat)
793: {
794: PetscErrorCode ierr;
795: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
796: PetscInt nvars= ex->nvars;
797: PetscInt size;
798: PetscInt part= 0; /* only one part */
801: size = ((ex->hbox.imax[0])-(ex->hbox.imin[0])+1)*((ex->hbox.imax[1])-(ex->hbox.imin[1])+1)*((ex->hbox.imax[2])-(ex->hbox.imin[2])+1);
802: {
803: PetscInt i,*entries,iupper[3],ilower[3];
804: PetscScalar *values;
805:
806:
807: for (i= 0; i< 3; i++) {
808: ilower[i]= ex->hbox.imin[i];
809: iupper[i]= ex->hbox.imax[i];
810: }
812: PetscMalloc2(nvars*7,PetscInt,&entries,nvars*7*size,PetscScalar,&values);
813: for (i= 0; i< nvars*7; i++) {
814: entries[i]= i;
815: }
816: PetscMemzero(values,nvars*7*size*sizeof(PetscScalar));
818: for (i= 0; i< nvars; i++) {
819: PetscStackCallHypre(0,HYPRE_SStructMatrixSetBoxValues,(ex->ss_mat,part,ilower,iupper,i,nvars*7,entries,values));
820: }
821: PetscFree2(entries,values);
822: }
823: PetscStackCallHypre(0,HYPRE_SStructMatrixAssemble,(ex->ss_mat));
824: return(0);
825: }
830: PetscErrorCode MatSetDM_HYPRESStruct(Mat mat,DM da)
831: {
832: PetscErrorCode ierr;
833: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
834: PetscInt dim,dof,sw[3],nx,ny,nz;
835: PetscInt ilower[3],iupper[3],ssize,i;
836: DMDABoundaryType px,py,pz;
837: DMDAStencilType st;
838: PetscInt nparts= 1; /* assuming only one part */
839: PetscInt part = 0;
842: ex->da = da;
843: PetscObjectReference((PetscObject)da);
845: DMDAGetInfo(ex->da,&dim,0,0,0,0,0,0,&dof,&sw[0],&px,&py,&pz,&st);
846: DMDAGetCorners(ex->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
847: iupper[0] += ilower[0] - 1;
848: iupper[1] += ilower[1] - 1;
849: iupper[2] += ilower[2] - 1;
850: /* the hypre_Box is used to zero out the matrix entries in MatZeroEntries() */
851: ex->hbox.imin[0] = ilower[0];
852: ex->hbox.imin[1] = ilower[1];
853: ex->hbox.imin[2] = ilower[2];
854: ex->hbox.imax[0] = iupper[0];
855: ex->hbox.imax[1] = iupper[1];
856: ex->hbox.imax[2] = iupper[2];
858: ex->dofs_order = 0;
860: /* assume the same number of dofs at each gridpoint; also assume all variables are cell-centered */
861: ex->nvars= dof;
863: /* create the hypre grid object and set its information */
864: if (px || py || pz) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Ask us to add periodic support by calling HYPRE_SStructGridSetPeriodic()");
865: PetscStackCallHypre(0,HYPRE_SStructGridCreate,(ex->hcomm,dim,nparts,&ex->ss_grid));
867: PetscStackCallHypre(0,HYPRE_SStructGridSetExtents,(ex->ss_grid,part,ex->hbox.imin,ex->hbox.imax));
869: {
870: HYPRE_SStructVariable *vartypes;
871: PetscMalloc(ex->nvars*sizeof(HYPRE_SStructVariable),&vartypes);
872: for (i= 0; i< ex->nvars; i++) {
873: vartypes[i]= HYPRE_SSTRUCT_VARIABLE_CELL;
874: }
875: PetscStackCallHypre(0,HYPRE_SStructGridSetVariables,(ex->ss_grid, part, ex->nvars,vartypes));
876: PetscFree(vartypes);
877: }
878: PetscStackCallHypre(0,HYPRE_SStructGridAssemble,(ex->ss_grid));
880: sw[1] = sw[0];
881: sw[2] = sw[1];
882: /* PetscStackCallHypre(0,HYPRE_SStructGridSetNumGhost,(ex->ss_grid,sw)); */
884: /* create the hypre stencil object and set its information */
885: if (sw[0] > 1) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Ask us to add support for wider stencils");
886: if (st == DMDA_STENCIL_BOX) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Ask us to add support for box stencils");
888: if (dim == 1) {
889: PetscInt offsets[3][1] = {{-1},{0},{1}};
890: PetscInt j, cnt;
892: ssize = 3*(ex->nvars);
893: PetscStackCallHypre(0,HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
894: cnt= 0;
895: for (i= 0; i< (ex->nvars); i++) {
896: for (j= 0; j< 3; j++) {
897: PetscStackCallHypre(0,HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, offsets[j], i));
898: cnt++;
899: }
900: }
901: } else if (dim == 2) {
902: PetscInt offsets[5][2] = {{0,-1},{-1,0},{0,0},{1,0},{0,1}};
903: PetscInt j, cnt;
905: ssize = 5*(ex->nvars);
906: PetscStackCallHypre(0,HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
907: cnt= 0;
908: for (i= 0; i< (ex->nvars); i++) {
909: for (j= 0; j< 5; j++) {
910: PetscStackCallHypre(0,HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, offsets[j], i));
911: cnt++;
912: }
913: }
914: } else if (dim == 3) {
915: PetscInt offsets[7][3] = {{0,0,-1},{0,-1,0},{-1,0,0},{0,0,0},{1,0,0},{0,1,0},{0,0,1}};
916: PetscInt j, cnt;
918: ssize = 7*(ex->nvars);
919: PetscStackCallHypre(0,HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
920: cnt= 0;
921: for (i= 0; i< (ex->nvars); i++) {
922: for (j= 0; j< 7; j++) {
923: PetscStackCallHypre(0,HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, offsets[j], i));
924: cnt++;
925: }
926: }
927: }
929: /* create the HYPRE graph */
930: PetscStackCallHypre(0,HYPRE_SStructGraphCreate,(ex->hcomm, ex->ss_grid, &(ex->ss_graph)));
932: /* set the stencil graph. Note that each variable has the same graph. This means that each
933: variable couples to all the other variables with the same stencil pattern. */
934: for (i= 0; i< (ex->nvars); i++) {
935: PetscStackCallHypre(0,HYPRE_SStructGraphSetStencil,(ex->ss_graph,part,i,ex->ss_stencil));
936: }
937: PetscStackCallHypre(0,HYPRE_SStructGraphAssemble,(ex->ss_graph));
939: /* create the HYPRE sstruct vectors for rhs and solution */
940: PetscStackCallHypre(0,HYPRE_SStructVectorCreate,(ex->hcomm,ex->ss_grid,&ex->ss_b));
941: PetscStackCallHypre(0,HYPRE_SStructVectorCreate,(ex->hcomm,ex->ss_grid,&ex->ss_x));
942: PetscStackCallHypre(0,HYPRE_SStructVectorInitialize,(ex->ss_b));
943: PetscStackCallHypre(0,HYPRE_SStructVectorInitialize,(ex->ss_x));
944: PetscStackCallHypre(0,HYPRE_SStructVectorAssemble,(ex->ss_b));
945: PetscStackCallHypre(0,HYPRE_SStructVectorAssemble,(ex->ss_x));
947: /* create the hypre matrix object and set its information */
948: PetscStackCallHypre(0,HYPRE_SStructMatrixCreate,(ex->hcomm,ex->ss_graph,&ex->ss_mat));
949: PetscStackCallHypre(0,HYPRE_SStructGridDestroy,(ex->ss_grid));
950: PetscStackCallHypre(0,HYPRE_SStructStencilDestroy,(ex->ss_stencil));
951: if (ex->needsinitialization) {
952: PetscStackCallHypre(0,HYPRE_SStructMatrixInitialize,(ex->ss_mat));
953: ex->needsinitialization = PETSC_FALSE;
954: }
956: /* set the global and local sizes of the matrix */
957: DMDAGetCorners(da,0,0,0,&nx,&ny,&nz);
958: MatSetSizes(mat,dof*nx*ny*nz,dof*nx*ny*nz,PETSC_DECIDE,PETSC_DECIDE);
959: PetscLayoutSetBlockSize(mat->rmap,1);
960: PetscLayoutSetBlockSize(mat->cmap,1);
961: PetscLayoutSetUp(mat->rmap);
962: PetscLayoutSetUp(mat->cmap);
963:
964: if (dim == 3) {
965: mat->ops->setvalueslocal = MatSetValuesLocal_HYPRESStruct_3d;
966: mat->ops->zerorowslocal = MatZeroRowsLocal_HYPRESStruct_3d;
967: mat->ops->zeroentries = MatZeroEntries_HYPRESStruct_3d;
968: MatZeroEntries_HYPRESStruct_3d(mat);
969: } else SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Only support for 3d DMDA currently");
970:
971: /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
972: MatGetOwnershipRange(mat,&ex->rstart,PETSC_NULL);
973: DMDAGetGlobalIndices(ex->da,PETSC_NULL,&ex->gindices);
974: DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,&ex->gnxgnygnz);
975: ex->gnxgny *= ex->gnx;
976: ex->gnxgnygnz *= ex->gnxgny;
977: DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,&ex->nz);
978: ex->nxny = ex->nx*ex->ny;
979: ex->nxnynz = ex->nz*ex->nxny;
980: return(0);
981: }
982:
985: PetscErrorCode MatMult_HYPRESStruct(Mat A,Vec x,Vec y)
986: {
987: PetscErrorCode ierr;
988: PetscScalar *xx,*yy;
989: PetscInt ilower[3],iupper[3];
990: Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(A->data);
991: PetscInt ordering= mx->dofs_order;
992: PetscInt nvars= mx->nvars;
993: PetscInt part= 0;
994: PetscInt size;
995: PetscInt i;
996:
998: DMDAGetCorners(mx->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
999: iupper[0] += ilower[0] - 1;
1000: iupper[1] += ilower[1] - 1;
1001: iupper[2] += ilower[2] - 1;
1003: size= 1;
1004: for (i= 0; i< 3; i++) {
1005: size*= (iupper[i]-ilower[i]+1);
1006: }
1008: /* copy x values over to hypre for variable ordering */
1009: if (ordering) {
1010: PetscStackCallHypre(0,HYPRE_SStructVectorSetConstantValues,(mx->ss_b,0.0));
1011: VecGetArray(x,&xx);
1012: for (i= 0; i< nvars; i++) {
1013: PetscStackCallHypre(0,HYPRE_SStructVectorSetBoxValues,(mx->ss_b,part,ilower,iupper,i,xx+(size*i)));
1014: }
1015: VecRestoreArray(x,&xx);
1016: PetscStackCallHypre(0,HYPRE_SStructVectorAssemble,(mx->ss_b));
1017: PetscStackCallHypre(0,HYPRE_SStructMatrixMatvec,(1.0,mx->ss_mat,mx->ss_b,0.0,mx->ss_x));
1019: /* copy solution values back to PETSc */
1020: VecGetArray(y,&yy);
1021: for (i= 0; i< nvars; i++) {
1022: PetscStackCallHypre(0,HYPRE_SStructVectorGetBoxValues,(mx->ss_x,part,ilower,iupper,i,yy+(size*i)));
1023: }
1024: VecRestoreArray(y,&yy);
1025: } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
1026: PetscScalar *z;
1027: PetscInt j, k;
1029: PetscMalloc(nvars*size*sizeof(PetscScalar),&z);
1030: PetscStackCallHypre(0,HYPRE_SStructVectorSetConstantValues,(mx->ss_b,0.0));
1031: VecGetArray(x,&xx);
1033: /* transform nodal to hypre's variable ordering for sys_pfmg */
1034: for (i= 0; i< size; i++) {
1035: k= i*nvars;
1036: for (j= 0; j< nvars; j++) {
1037: z[j*size+i]= xx[k+j];
1038: }
1039: }
1040: for (i= 0; i< nvars; i++) {
1041: PetscStackCallHypre(0,HYPRE_SStructVectorSetBoxValues,(mx->ss_b,part,ilower,iupper,i,z+(size*i)));
1042: }
1043: VecRestoreArray(x,&xx);
1044: PetscStackCallHypre(0,HYPRE_SStructVectorAssemble,(mx->ss_b));
1045: PetscStackCallHypre(0,HYPRE_SStructMatrixMatvec,(1.0,mx->ss_mat,mx->ss_b,0.0,mx->ss_x));
1046:
1047: /* copy solution values back to PETSc */
1048: VecGetArray(y,&yy);
1049: for (i= 0; i< nvars; i++) {
1050: PetscStackCallHypre(0,HYPRE_SStructVectorGetBoxValues,(mx->ss_x,part,ilower,iupper,i,z+(size*i)));
1051: }
1052: /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
1053: for (i= 0; i< size; i++) {
1054: k= i*nvars;
1055: for (j= 0; j< nvars; j++) {
1056: yy[k+j]= z[j*size+i];
1057: }
1058: }
1059: VecRestoreArray(y,&yy);
1060: PetscFree(z);
1061: }
1062: return(0);
1063: }
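/*
   Editor's note (illustrative values, not part of the original source): the z[] scratch array
   above converts PETSc's nodal layout to hypre's per-variable box layout, i.e.
   z[j*size + i] = xx[i*nvars + j].  With nvars = 2 and size = 3 the input
   xx = {a0,b0,a1,b1,a2,b2} becomes z = {a0,a1,a2,b0,b1,b2}, and the inverse mapping is applied
   to the result before it is returned in y.
*/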
1067: PetscErrorCode MatAssemblyEnd_HYPRESStruct(Mat mat,MatAssemblyType mode)
1068: {
1069: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
1070: PetscErrorCode ierr;
1073: PetscStackCallHypre(0,HYPRE_SStructMatrixAssemble,(ex->ss_mat));
1074: return(0);
1075: }
1079: PetscErrorCode MatZeroEntries_HYPRESStruct(Mat mat)
1080: {
1082: /* before the DMDA is attached to the matrix, zeroing the entries does not need to do anything */
1083: return(0);
1084: }
1089: PetscErrorCode MatDestroy_HYPRESStruct(Mat mat)
1090: {
1091: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
1092: PetscErrorCode ierr;
1095: PetscStackCallHypre(0,HYPRE_SStructGraphDestroy,(ex->ss_graph));
1096: PetscStackCallHypre(0,HYPRE_SStructMatrixDestroy,(ex->ss_mat));
1097: PetscStackCallHypre(0,HYPRE_SStructVectorDestroy,(ex->ss_x));
1098: PetscStackCallHypre(0,HYPRE_SStructVectorDestroy,(ex->ss_b));
1099: return(0);
1100: }
1102: EXTERN_C_BEGIN
1105: PetscErrorCode MatCreate_HYPRESStruct(Mat B)
1106: {
1107: Mat_HYPRESStruct *ex;
1108: PetscErrorCode ierr;
1111: PetscNewLog(B,Mat_HYPRESStruct,&ex);
1112: B->data = (void*)ex;
1113: B->rmap->bs = 1;
1114: B->assembled = PETSC_FALSE;
1116: B->insertmode = NOT_SET_VALUES;
1118: B->ops->assemblyend = MatAssemblyEnd_HYPRESStruct;
1119: B->ops->mult = MatMult_HYPRESStruct;
1120: B->ops->zeroentries = MatZeroEntries_HYPRESStruct;
1121: B->ops->destroy = MatDestroy_HYPRESStruct;
1123: ex->needsinitialization = PETSC_TRUE;
1124:
1125: MPI_Comm_dup(((PetscObject)B)->comm,&(ex->hcomm));
1126: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSetDM_C","MatSetDM_HYPRESStruct",MatSetDM_HYPRESStruct);
1127: PetscObjectChangeTypeName((PetscObject)B,MATHYPRESSTRUCT);
1128: return(0);
1129: }
1130: EXTERN_C_END
1133: