/* Actual source code: mhyp.c
   petsc-3.4.5 2014-06-29 */
2: /*
3: Creates hypre ijmatrix from PETSc matrix
4: */
5: #include <petscsys.h>
6: #include <petsc-private/matimpl.h>
7: #include <petscdmda.h> /*I "petscdmda.h" I*/
8: #include <../src/dm/impls/da/hypre/mhyp.h>
12: PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o,HYPRE_IJMatrix ij)
13: {
15: PetscInt i,n_d,n_o;
16: const PetscInt *ia_d,*ia_o;
17: PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE;
18: PetscInt *nnz_d=NULL,*nnz_o=NULL;
21: if (A_d) { /* determine number of nonzero entries in local diagonal part */
22: MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
23: if (done_d) {
24: PetscMalloc(n_d*sizeof(PetscInt),&nnz_d);
25: for (i=0; i<n_d; i++) {
26: nnz_d[i] = ia_d[i+1] - ia_d[i];
27: }
28: }
29: MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
30: }
31: if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
32: MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
33: if (done_o) {
34: PetscMalloc(n_o*sizeof(PetscInt),&nnz_o);
35: for (i=0; i<n_o; i++) {
36: nnz_o[i] = ia_o[i+1] - ia_o[i];
37: }
38: }
39: MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
40: }
41: if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
42: if (!done_o) { /* only diagonal part */
43: PetscMalloc(n_d*sizeof(PetscInt),&nnz_o);
44: for (i=0; i<n_d; i++) {
45: nnz_o[i] = 0;
46: }
47: }
48: PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
49: PetscFree(nnz_d);
50: PetscFree(nnz_o);
51: }
52: return(0);
53: }
57: PetscErrorCode MatHYPRE_IJMatrixCreate(Mat A,HYPRE_IJMatrix *ij)
58: {
60: PetscInt rstart,rend,cstart,cend;
66: MatSetUp(A);
67: rstart = A->rmap->rstart;
68: rend = A->rmap->rend;
69: cstart = A->cmap->rstart;
70: cend = A->cmap->rend;
71: PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
72: PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));
73: {
74: PetscBool same;
75: Mat A_d,A_o;
76: const PetscInt *colmap;
77: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);
78: if (same) {
79: MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
80: MatHYPRE_IJMatrixPreallocate(A_d,A_o,*ij);
81: return(0);
82: }
83: PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
84: if (same) {
85: MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
86: MatHYPRE_IJMatrixPreallocate(A_d,A_o,*ij);
87: return(0);
88: }
89: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);
90: if (same) {
91: MatHYPRE_IJMatrixPreallocate(A,NULL,*ij);
92: return(0);
93: }
94: PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
95: if (same) {
96: MatHYPRE_IJMatrixPreallocate(A,NULL,*ij);
97: return(0);
98: }
99: }
100: return(0);
101: }
103: extern PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
104: extern PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
105: /*
106: Copies the data over (column indices, numerical values) to hypre matrix
107: */
111: PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A,HYPRE_IJMatrix ij)
112: {
113: PetscErrorCode ierr;
114: PetscInt i,rstart,rend,ncols;
115: const PetscScalar *values;
116: const PetscInt *cols;
117: PetscBool flg;
120: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
121: if (flg) {
122: MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
123: return(0);
124: }
125: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
126: if (flg) {
127: MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
128: return(0);
129: }
131: PetscLogEventBegin(MAT_Convert,A,0,0,0);
132: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
133: MatGetOwnershipRange(A,&rstart,&rend);
134: for (i=rstart; i<rend; i++) {
135: MatGetRow(A,i,&ncols,&cols,&values);
136: PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&ncols,&i,cols,values));
137: MatRestoreRow(A,i,&ncols,&cols,&values);
138: }
139: PetscStackCallStandard(HYPRE_IJMatrixAssemble,(ij));
140: PetscLogEventEnd(MAT_Convert,A,0,0,0);
141: return(0);
142: }
144: /*
145: This copies the CSR format directly from the PETSc data structure to the hypre
146: data structure without calls to MatGetRow() or hypre's set values.
148: */
149: #include <_hypre_IJ_mv.h>
150: #include <HYPRE_IJ_mv.h>
151: #include <../src/mat/impls/aij/mpi/mpiaij.h>
/*
   Copies a SeqAIJ matrix into the hypre IJ matrix by memcpy'ing the CSR
   arrays (row starts, column indices, values) directly into hypre's internal
   ParCSR diagonal block, bypassing MatGetRow()/HYPRE_IJMatrixSetValues().

   NOTE(review): the memcpys assume hypre's index type has the same size as
   PetscInt and its value type the same size as PetscScalar -- confirm the
   hypre build configuration matches.
*/
PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A,HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscLogEventBegin(MAT_Convert,A,0,0,0);
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  par_matrix = (hypre_ParCSRMatrix*)hypre_IJMatrixObject(ij);
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
  PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
  PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));

  /* tell hypre the data is already in place so assembly skips the aux matrix */
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(ij));
  PetscLogEventEnd(MAT_Convert,A,0,0,0);
  return(0);
}
/*
   Copies an MPIAIJ matrix into the hypre IJ matrix by writing the CSR arrays
   of the local diagonal and off-diagonal blocks directly into hypre's ParCSR
   data structures.  Column indices must be converted from PETSc's local
   numbering to the global numbering hypre expects: diagonal-block columns are
   shifted by cstart, off-diagonal columns are mapped through garray.
*/
PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A,HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;

  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  MatGetOwnershipRange(A,&cstart,NULL);

  PetscLogEventBegin(MAT_Convert,A,0,0,0);
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  par_matrix = (hypre_ParCSRMatrix*)hypre_IJMatrixObject(ij);
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  jj  = hdiag->j;
  pjj = pdiag->j;
  for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
  PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
  PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
  jj  = hoffd->j;
  pjj = poffd->j;
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
  PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));

  /* tell hypre the data is already in place so assembly skips the aux matrix */
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(ij));
  PetscLogEventEnd(MAT_Convert,A,0,0,0);
  return(0);
}
240: /*
241: Does NOT copy the data over, instead uses DIRECTLY the pointers from the PETSc MPIAIJ format
243: This is UNFINISHED and does NOT work! The problem is that hypre puts the diagonal entry first
244: which will corrupt the PETSc data structure if we did this. Need a work around to this problem.
245: */
246: #include <_hypre_IJ_mv.h>
247: #include <HYPRE_IJ_mv.h>
/*
   Intended to make the hypre IJ matrix use the PETSc MPIAIJ storage directly
   without copying.  As noted in the file comment above, this is UNFINISHED
   and does NOT work: hypre stores the diagonal entry first in each row, which
   would corrupt the PETSc data structure.  No data pointers are actually
   linked here -- the function only creates and assembles an empty IJ matrix.
*/
PetscErrorCode MatHYPRE_IJMatrixLink(Mat A,HYPRE_IJMatrix *ij)
{
  PetscErrorCode        ierr;
  PetscInt              rstart,rend,cstart,cend;
  PetscBool             flg;
  hypre_AuxParCSRMatrix *aux_matrix;

  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Can only use with PETSc MPIAIJ matrices");
  MatSetUp(A);

  PetscLogEventBegin(MAT_Convert,A,0,0,0);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre takes inclusive upper bounds, hence the -1 */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(PetscObjectComm((PetscObject)A),rstart,rend-1,cstart,cend-1,ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(*ij,HYPRE_PARCSR));

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(*ij));
  PetscStackCall("hypre_IJMatrixTranslator",aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(*ij));

  /* tell hypre not to build its auxiliary assembly structure */
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;

  /* this is the Hack part where we monkey directly with the hypre datastructures */

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(*ij));
  PetscLogEventEnd(MAT_Convert,A,0,0,0);
  return(0);
}
286: /* -----------------------------------------------------------------------------------------------------------------*/
288: /*MC
289: MATHYPRESTRUCT - MATHYPRESTRUCT = "hyprestruct" - A matrix type to be used for parallel sparse matrices
290: based on the hypre HYPRE_StructMatrix.
292: Level: intermediate
294: Notes: Unlike the more general support for blocks in hypre this allows only one block per process and requires the block
295: be defined by a DMDA.
297: The matrix needs a DMDA associated with it by either a call to MatSetupDM() or if the matrix is obtained from DMCreateMatrix()
299: .seealso: MatCreate(), PCPFMG, MatSetupDM(), DMCreateMatrix()
300: M*/
/*
   MatSetValuesLocal() for 3d MATHYPRESTRUCT matrices: translates each
   (local row, local column) pair into a 7-point stencil entry number plus a
   3d grid index, then hands the row of values to hypre.

   Stencil entry numbering (matches MatSetupDM_HYPREStruct()):
     0:(0,0,-1)  1:(0,-1,0)  2:(-1,0,0)  3:(0,0,0)  4:(1,0,0)  5:(0,1,0)  6:(0,0,1)

   NOTE(review): entries[] holds 7 values, so this assumes ncol <= 7
   (one row of a 7-point stencil) -- confirm against callers.
*/
PetscErrorCode MatSetValuesLocal_HYPREStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
{
  PetscErrorCode    ierr;
  PetscInt          i,j,stencil,index[3],row,entries[7];
  const PetscScalar *values = y;
  Mat_HYPREStruct   *ex = (Mat_HYPREStruct*) mat->data;

  for (i=0; i<nrow; i++) {
    for (j=0; j<ncol; j++) {
      /* the offset of the column from the row in the local (ghosted)
         numbering identifies which stencil entry this value belongs to */
      stencil = icol[j] - irow[i];
      if (!stencil) {
        entries[j] = 3;
      } else if (stencil == -1) {
        entries[j] = 2;
      } else if (stencil == 1) {
        entries[j] = 4;
      } else if (stencil == -ex->gnx) {
        entries[j] = 1;
      } else if (stencil == ex->gnx) {
        entries[j] = 5;
      } else if (stencil == -ex->gnxgny) {
        entries[j] = 0;
      } else if (stencil == ex->gnxgny) {
        entries[j] = 6;
      } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
    }
    /* convert the global row number into (i,j,k) coordinates on this
       process's patch of the structured grid */
    row = ex->gindices[irow[i]] - ex->rstart;
    index[0] = ex->xs + (row % ex->nx);
    index[1] = ex->ys + ((row/ex->nx) % ex->ny);
    index[2] = ex->zs + (row/(ex->nxny));
    if (addv == ADD_VALUES) {
      PetscStackCallStandard(HYPRE_StructMatrixAddToValues,(ex->hmat,index,ncol,entries,(PetscScalar*)values));
    } else {
      PetscStackCallStandard(HYPRE_StructMatrixSetValues,(ex->hmat,index,ncol,entries,(PetscScalar*)values));
    }
    values += ncol;  /* y holds nrow consecutive rows of ncol values each */
  }
  return(0);
}
348: PetscErrorCode MatZeroRowsLocal_HYPREStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscScalar d,Vec x,Vec b)
349: {
350: PetscErrorCode ierr;
351: PetscInt i,index[3],row,entries[7] = {0,1,2,3,4,5,6};
352: PetscScalar values[7];
353: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
356: if (x && b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support");
357: PetscMemzero(values,7*sizeof(PetscScalar));
358: values[3] = d;
359: for (i=0; i<nrow; i++) {
360: row = ex->gindices[irow[i]] - ex->rstart;
361: index[0] = ex->xs + (row % ex->nx);
362: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
363: index[2] = ex->zs + (row/(ex->nxny));
364: PetscStackCallStandard(HYPRE_StructMatrixSetValues,(ex->hmat,index,7,entries,values));
365: }
366: PetscStackCallStandard(HYPRE_StructMatrixAssemble,(ex->hmat));
367: return(0);
368: }
372: PetscErrorCode MatZeroEntries_HYPREStruct_3d(Mat mat)
373: {
374: PetscErrorCode ierr;
375: PetscInt indices[7] = {0,1,2,3,4,5,6};
376: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
379: /* hypre has no public interface to do this */
380: PetscStackCallStandard(hypre_StructMatrixClearBoxValues,(ex->hmat,&ex->hbox,7,indices,0,1));
381: PetscStackCallStandard(HYPRE_StructMatrixAssemble,(ex->hmat));
382: return(0);
383: }
/*
   Associates a DMDA with a MATHYPRESTRUCT matrix: builds the hypre grid,
   stencil, work vectors, and struct matrix from the DMDA layout, sets the
   PETSc matrix sizes, installs the 3d operations, and caches the index
   information used by MatSetValuesLocal()/MatZeroRowsLocal().
   Currently restricted to scalar (dof==1), non-periodic, star-stencil,
   stencil-width-1, 3d DMDAs.
*/
static PetscErrorCode MatSetupDM_HYPREStruct(Mat mat,DM da)
{
  PetscErrorCode   ierr;
  Mat_HYPREStruct  *ex = (Mat_HYPREStruct*) mat->data;
  PetscInt         dim,dof,sw[3],nx,ny,nz,ilower[3],iupper[3],ssize,i;
  DMDABoundaryType px,py,pz;
  DMDAStencilType  st;

  ex->da = da;
  PetscObjectReference((PetscObject)da);

  DMDAGetInfo(ex->da,&dim,0,0,0,0,0,0,&dof,&sw[0],&px,&py,&pz,&st);
  DMDAGetCorners(ex->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
  /* DMDAGetCorners() returns widths in iupper[]; convert to inclusive upper corners */
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;

  /* the hypre_Box is used to zero out the matrix entries in MatZeroValues() */
  ex->hbox.imin[0] = ilower[0];
  ex->hbox.imin[1] = ilower[1];
  ex->hbox.imin[2] = ilower[2];
  ex->hbox.imax[0] = iupper[0];
  ex->hbox.imax[1] = iupper[1];
  ex->hbox.imax[2] = iupper[2];

  /* create the hypre grid object and set its information */
  if (dof > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Currently only support for scalar problems");
  if (px || py || pz) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add periodic support by calling HYPRE_StructGridSetPeriodic()");
  PetscStackCallStandard(HYPRE_StructGridCreate,(ex->hcomm,dim,&ex->hgrid));
  PetscStackCallStandard(HYPRE_StructGridSetExtents,(ex->hgrid,ilower,iupper));
  PetscStackCallStandard(HYPRE_StructGridAssemble,(ex->hgrid));

  /* DMDAGetInfo() only filled sw[0]; use the same stencil width in all directions */
  sw[1] = sw[0];
  sw[2] = sw[1];
  PetscStackCallStandard(HYPRE_StructGridSetNumGhost,(ex->hgrid,sw));

  /* create the hypre stencil object and set its information */
  if (sw[0] > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for wider stencils");
  if (st == DMDA_STENCIL_BOX) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for box stencils");
  if (dim == 1) {
    PetscInt offsets[3][1] = {{-1},{0},{1}};
    ssize = 3;
    PetscStackCallStandard(HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
    for (i=0; i<ssize; i++) {
      PetscStackCallStandard(HYPRE_StructStencilSetElement,(ex->hstencil,i,offsets[i]));
    }
  } else if (dim == 2) {
    PetscInt offsets[5][2] = {{0,-1},{-1,0},{0,0},{1,0},{0,1}};
    ssize = 5;
    PetscStackCallStandard(HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
    for (i=0; i<ssize; i++) {
      PetscStackCallStandard(HYPRE_StructStencilSetElement,(ex->hstencil,i,offsets[i]));
    }
  } else if (dim == 3) {
    PetscInt offsets[7][3] = {{0,0,-1},{0,-1,0},{-1,0,0},{0,0,0},{1,0,0},{0,1,0},{0,0,1}};
    ssize = 7;
    PetscStackCallStandard(HYPRE_StructStencilCreate,(dim,ssize,&ex->hstencil));
    for (i=0; i<ssize; i++) {
      PetscStackCallStandard(HYPRE_StructStencilSetElement,(ex->hstencil,i,offsets[i]));
    }
  }

  /* create the HYPRE vector for rhs and solution */
  PetscStackCallStandard(HYPRE_StructVectorCreate,(ex->hcomm,ex->hgrid,&ex->hb));
  PetscStackCallStandard(HYPRE_StructVectorCreate,(ex->hcomm,ex->hgrid,&ex->hx));
  PetscStackCallStandard(HYPRE_StructVectorInitialize,(ex->hb));
  PetscStackCallStandard(HYPRE_StructVectorInitialize,(ex->hx));
  PetscStackCallStandard(HYPRE_StructVectorAssemble,(ex->hb));
  PetscStackCallStandard(HYPRE_StructVectorAssemble,(ex->hx));

  /* create the hypre matrix object and set its information; grid and stencil
     are reference-counted by hypre, so our handles can be destroyed here */
  PetscStackCallStandard(HYPRE_StructMatrixCreate,(ex->hcomm,ex->hgrid,ex->hstencil,&ex->hmat));
  PetscStackCallStandard(HYPRE_StructGridDestroy,(ex->hgrid));
  PetscStackCallStandard(HYPRE_StructStencilDestroy,(ex->hstencil));
  if (ex->needsinitialization) {
    PetscStackCallStandard(HYPRE_StructMatrixInitialize,(ex->hmat));
    ex->needsinitialization = PETSC_FALSE;
  }

  /* set the global and local sizes of the matrix */
  DMDAGetCorners(da,0,0,0,&nx,&ny,&nz);
  MatSetSizes(mat,dof*nx*ny*nz,dof*nx*ny*nz,PETSC_DECIDE,PETSC_DECIDE);
  PetscLayoutSetBlockSize(mat->rmap,1);
  PetscLayoutSetBlockSize(mat->cmap,1);
  PetscLayoutSetUp(mat->rmap);
  PetscLayoutSetUp(mat->cmap);

  if (dim == 3) {
    mat->ops->setvalueslocal = MatSetValuesLocal_HYPREStruct_3d;
    mat->ops->zerorowslocal  = MatZeroRowsLocal_HYPREStruct_3d;
    mat->ops->zeroentries    = MatZeroEntries_HYPREStruct_3d;

    MatZeroEntries_HYPREStruct_3d(mat);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only support for 3d DMDA currently");

  /* cache values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
  MatGetOwnershipRange(mat,&ex->rstart,NULL);
  DMDAGetGlobalIndices(ex->da,NULL,&ex->gindices);
  /* gnxgny first receives the ghosted y-extent, then becomes gnx*gny below */
  DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,0);
  ex->gnxgny *= ex->gnx;
  DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,0);
  ex->nxny = ex->nx*ex->ny;
  return(0);
}
495: PetscErrorCode MatMult_HYPREStruct(Mat A,Vec x,Vec y)
496: {
497: PetscErrorCode ierr;
498: PetscScalar *xx,*yy;
499: PetscInt ilower[3],iupper[3];
500: Mat_HYPREStruct *mx = (Mat_HYPREStruct*)(A->data);
503: DMDAGetCorners(mx->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
505: iupper[0] += ilower[0] - 1;
506: iupper[1] += ilower[1] - 1;
507: iupper[2] += ilower[2] - 1;
509: /* copy x values over to hypre */
510: PetscStackCallStandard(HYPRE_StructVectorSetConstantValues,(mx->hb,0.0));
511: VecGetArray(x,&xx);
512: PetscStackCallStandard(HYPRE_StructVectorSetBoxValues,(mx->hb,ilower,iupper,xx));
513: VecRestoreArray(x,&xx);
514: PetscStackCallStandard(HYPRE_StructVectorAssemble,(mx->hb));
515: PetscStackCallStandard(HYPRE_StructMatrixMatvec,(1.0,mx->hmat,mx->hb,0.0,mx->hx));
517: /* copy solution values back to PETSc */
518: VecGetArray(y,&yy);
519: PetscStackCallStandard(HYPRE_StructVectorGetBoxValues,(mx->hx,ilower,iupper,yy));
520: VecRestoreArray(y,&yy);
521: return(0);
522: }
526: PetscErrorCode MatAssemblyEnd_HYPREStruct(Mat mat,MatAssemblyType mode)
527: {
528: Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
529: PetscErrorCode ierr;
532: PetscStackCallStandard(HYPRE_StructMatrixAssemble,(ex->hmat));
533: /* PetscStackCallStandard(HYPRE_StructMatrixPrint,("dummy",ex->hmat,0)); */
534: return(0);
535: }
/*
   Placeholder MatZeroEntries() installed at creation time; it is replaced by
   MatZeroEntries_HYPREStruct_3d() once MatSetupDM_HYPREStruct() runs.
*/
PetscErrorCode MatZeroEntries_HYPREStruct(Mat mat)
{
  /* before the DMDA is set to the matrix the zero doesn't need to do anything */
  return(0);
}
/*
   Destroys the hypre objects held by a MATHYPRESTRUCT matrix.

   NOTE(review): ex->da (referenced in MatSetupDM_HYPREStruct()), the
   communicator duplicated into ex->hcomm in MatCreate_HYPREStruct(), and the
   Mat_HYPREStruct struct itself are not released here -- confirm whether
   another code path frees them or these are leaks.
*/
PetscErrorCode MatDestroy_HYPREStruct(Mat mat)
{
  Mat_HYPREStruct *ex = (Mat_HYPREStruct*) mat->data;
  PetscErrorCode  ierr;

  PetscStackCallStandard(HYPRE_StructMatrixDestroy,(ex->hmat));
  PetscStackCallStandard(HYPRE_StructVectorDestroy,(ex->hx));
  PetscStackCallStandard(HYPRE_StructVectorDestroy,(ex->hb));
  return(0);
}
/*
   Constructor for MATHYPRESTRUCT: allocates the per-matrix context,
   installs the operation table, and duplicates the communicator for hypre's
   use.  The hypre grid/stencil/matrix objects are built later, when a DMDA
   is attached via the composed "MatSetupDM_C" function.
*/
PETSC_EXTERN PetscErrorCode MatCreate_HYPREStruct(Mat B)
{
  Mat_HYPREStruct *ex;
  PetscErrorCode  ierr;

  PetscNewLog(B,Mat_HYPREStruct,&ex);
  B->data      = (void*)ex;
  B->rmap->bs  = 1;
  B->assembled = PETSC_FALSE;

  B->insertmode = NOT_SET_VALUES;

  B->ops->assemblyend = MatAssemblyEnd_HYPREStruct;
  B->ops->mult        = MatMult_HYPREStruct;
  B->ops->zeroentries = MatZeroEntries_HYPREStruct;
  B->ops->destroy     = MatDestroy_HYPREStruct;

  ex->needsinitialization = PETSC_TRUE;

  /* hypre gets its own communicator so its messages cannot collide with PETSc's */
  MPI_Comm_dup(PetscObjectComm((PetscObject)B),&(ex->hcomm));
  PetscObjectComposeFunction((PetscObject)B,"MatSetupDM_C",MatSetupDM_HYPREStruct);
  PetscObjectChangeTypeName((PetscObject)B,MATHYPRESTRUCT);
  return(0);
}
590: /*MC
591: MATHYPRESSTRUCT - MATHYPRESSTRUCT = "hypresstruct" - A matrix type to be used for parallel sparse matrices
592: based on the hypre HYPRE_SStructMatrix.
595: Level: intermediate
597: Notes: Unlike hypre's general semi-struct object consisting of a collection of structured-grid objects and unstructured
598: grid objects, we will restrict the semi-struct objects to consist of only structured-grid components.
600: Unlike the more general support for parts and blocks in hypre this allows only one part, and one block per process and requires the block
601: be defined by a DMDA.
603: The matrix needs a DMDA associated with it by either a call to MatSetupDM() or if the matrix is obtained from DMCreateMatrix()
605: M*/
609: PetscErrorCode MatSetValuesLocal_HYPRESStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
610: {
611: PetscErrorCode ierr;
612: PetscInt i,j,stencil,index[3];
613: const PetscScalar *values = y;
614: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
616: PetscInt part = 0; /* Petsc sstruct interface only allows 1 part */
617: PetscInt ordering;
618: PetscInt grid_rank, to_grid_rank;
619: PetscInt var_type, to_var_type;
620: PetscInt to_var_entry = 0;
622: PetscInt nvars= ex->nvars;
623: PetscInt row,*entries;
626: PetscMalloc(7*nvars*sizeof(PetscInt),&entries);
627: ordering= ex->dofs_order; /* ordering= 0 nodal ordering
628: 1 variable ordering */
629: /* stencil entries are orderer by variables: var0_stencil0, var0_stencil1, ..., var0_stencil6, var1_stencil0, var1_stencil1, ... */
631: if (!ordering) {
632: for (i=0; i<nrow; i++) {
633: grid_rank= irow[i]/nvars;
634: var_type = (irow[i] % nvars);
636: for (j=0; j<ncol; j++) {
637: to_grid_rank= icol[j]/nvars;
638: to_var_type = (icol[j] % nvars);
640: to_var_entry= to_var_entry*7;
641: entries[j] = to_var_entry;
643: stencil = to_grid_rank-grid_rank;
644: if (!stencil) {
645: entries[j] += 3;
646: } else if (stencil == -1) {
647: entries[j] += 2;
648: } else if (stencil == 1) {
649: entries[j] += 4;
650: } else if (stencil == -ex->gnx) {
651: entries[j] += 1;
652: } else if (stencil == ex->gnx) {
653: entries[j] += 5;
654: } else if (stencil == -ex->gnxgny) {
655: entries[j] += 0;
656: } else if (stencil == ex->gnxgny) {
657: entries[j] += 6;
658: } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
659: }
661: row = ex->gindices[grid_rank] - ex->rstart;
662: index[0] = ex->xs + (row % ex->nx);
663: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
664: index[2] = ex->zs + (row/(ex->nxny));
666: if (addv == ADD_VALUES) {
667: PetscStackCallStandard(HYPRE_SStructMatrixAddToValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
668: } else {
669: PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
670: }
671: values += ncol;
672: }
673: } else {
674: for (i=0; i<nrow; i++) {
675: var_type = irow[i]/(ex->gnxgnygnz);
676: grid_rank= irow[i] - var_type*(ex->gnxgnygnz);
678: for (j=0; j<ncol; j++) {
679: to_var_type = icol[j]/(ex->gnxgnygnz);
680: to_grid_rank= icol[j] - to_var_type*(ex->gnxgnygnz);
682: to_var_entry= to_var_entry*7;
683: entries[j] = to_var_entry;
685: stencil = to_grid_rank-grid_rank;
686: if (!stencil) {
687: entries[j] += 3;
688: } else if (stencil == -1) {
689: entries[j] += 2;
690: } else if (stencil == 1) {
691: entries[j] += 4;
692: } else if (stencil == -ex->gnx) {
693: entries[j] += 1;
694: } else if (stencil == ex->gnx) {
695: entries[j] += 5;
696: } else if (stencil == -ex->gnxgny) {
697: entries[j] += 0;
698: } else if (stencil == ex->gnxgny) {
699: entries[j] += 6;
700: } else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row %D local column %D have bad stencil %D",irow[i],icol[j],stencil);
701: }
703: row = ex->gindices[grid_rank] - ex->rstart;
704: index[0] = ex->xs + (row % ex->nx);
705: index[1] = ex->ys + ((row/ex->nx) % ex->ny);
706: index[2] = ex->zs + (row/(ex->nxny));
708: if (addv == ADD_VALUES) {
709: PetscStackCallStandard(HYPRE_SStructMatrixAddToValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
710: } else {
711: PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,ncol,entries,(PetscScalar*)values));
712: }
713: values += ncol;
714: }
715: }
716: PetscFree(entries);
717: return(0);
718: }
/*
   MatZeroRowsLocal() for 3d MATHYPRESSTRUCT matrices: zeroes all 7*nvars
   stencil entries of each listed row and places d on a diagonal entry.
   Fixing right-hand side b from solution x is not supported.

   NOTE(review): *(values[i]+3) puts d at entry 3 for every variable i, i.e.
   the center coupling to variable 0; for nvars > 1 the diagonal of variable
   i would seem to be entry i*7+3 -- confirm the intended semantics.
*/
PetscErrorCode MatZeroRowsLocal_HYPRESStruct_3d(Mat mat,PetscInt nrow,const PetscInt irow[],PetscScalar d,Vec x,Vec b)
{
  PetscErrorCode   ierr;
  PetscInt         i,index[3];
  PetscScalar      **values;
  Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;

  PetscInt part     = 0;  /* Petsc sstruct interface only allows 1 part */
  PetscInt ordering = ex->dofs_order;
  PetscInt grid_rank;
  PetscInt var_type;
  PetscInt nvars= ex->nvars;
  PetscInt row,*entries;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support");
  PetscMalloc(7*nvars*sizeof(PetscInt),&entries);

  /* values is one contiguous 7*nvars*nvars block addressed as nvars rows of 7*nvars */
  PetscMalloc(nvars*sizeof(PetscScalar*),&values);
  PetscMalloc(7*nvars*nvars*sizeof(PetscScalar),&values[0]);
  for (i=1; i<nvars; i++) {
    values[i] = values[i-1] + nvars*7;
  }

  /* each row of values: all zero except d on the center entry */
  for (i=0; i< nvars; i++) {
    PetscMemzero(values[i],nvars*7*sizeof(PetscScalar));
    *(values[i]+3) = d;
  }

  for (i= 0; i< nvars*7; i++) entries[i] = i;

  if (!ordering) {
    /* nodal ordering: dof = gridpoint*nvars + variable */
    for (i=0; i<nrow; i++) {
      grid_rank = irow[i] / nvars;
      var_type  =(irow[i] % nvars);

      /* convert the grid point into (i,j,k) coordinates on this process's patch */
      row = ex->gindices[grid_rank] - ex->rstart;
      index[0] = ex->xs + (row % ex->nx);
      index[1] = ex->ys + ((row/ex->nx) % ex->ny);
      index[2] = ex->zs + (row/(ex->nxny));
      PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,7*nvars,entries,values[var_type]));
    }
  } else {
    /* variable ordering: dof = variable*gnxgnygnz + gridpoint */
    for (i=0; i<nrow; i++) {
      var_type = irow[i]/(ex->gnxgnygnz);
      grid_rank= irow[i] - var_type*(ex->gnxgnygnz);

      row = ex->gindices[grid_rank] - ex->rstart;
      index[0] = ex->xs + (row % ex->nx);
      index[1] = ex->ys + ((row/ex->nx) % ex->ny);
      index[2] = ex->zs + (row/(ex->nxny));
      PetscStackCallStandard(HYPRE_SStructMatrixSetValues,(ex->ss_mat,part,index,var_type,7*nvars,entries,values[var_type]));
    }
  }
  PetscStackCallStandard(HYPRE_SStructMatrixAssemble,(ex->ss_mat));
  PetscFree(values[0]);
  PetscFree(values);
  PetscFree(entries);
  return(0);
}
/*
   Zeroes every stencil entry of all nvars variables of the 3d MATHYPRESSTRUCT
   matrix over the box recorded at setup time in ex->hbox, by writing zeros
   through HYPRE_SStructMatrixSetBoxValues().
*/
PetscErrorCode MatZeroEntries_HYPRESStruct_3d(Mat mat)
{
  PetscErrorCode   ierr;
  Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
  PetscInt         nvars= ex->nvars;
  PetscInt         size;
  PetscInt         part= 0;   /* only one part */

  /* number of grid points in the (inclusive) local box */
  size = ((ex->hbox.imax[0])-(ex->hbox.imin[0])+1)*((ex->hbox.imax[1])-(ex->hbox.imin[1])+1)*((ex->hbox.imax[2])-(ex->hbox.imin[2])+1);
  {
    PetscInt    i,*entries,iupper[3],ilower[3];
    PetscScalar *values;

    for (i= 0; i< 3; i++) {
      ilower[i]= ex->hbox.imin[i];
      iupper[i]= ex->hbox.imax[i];
    }

    /* one zero block covering all 7*nvars entries at every point of the box */
    PetscMalloc2(nvars*7,PetscInt,&entries,nvars*7*size,PetscScalar,&values);
    for (i= 0; i< nvars*7; i++) entries[i]= i;
    PetscMemzero(values,nvars*7*size*sizeof(PetscScalar));

    for (i= 0; i< nvars; i++) {
      PetscStackCallStandard(HYPRE_SStructMatrixSetBoxValues,(ex->ss_mat,part,ilower,iupper,i,nvars*7,entries,values));
    }
    PetscFree2(entries,values);
  }
  PetscStackCallStandard(HYPRE_SStructMatrixAssemble,(ex->ss_mat));
  return(0);
}
821: static PetscErrorCode MatSetupDM_HYPRESStruct(Mat mat,DM da)
822: {
823: PetscErrorCode ierr;
824: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
825: PetscInt dim,dof,sw[3],nx,ny,nz;
826: PetscInt ilower[3],iupper[3],ssize,i;
827: DMDABoundaryType px,py,pz;
828: DMDAStencilType st;
829: PetscInt nparts= 1; /* assuming only one part */
830: PetscInt part = 0;
833: ex->da = da;
834: PetscObjectReference((PetscObject)da);
836: DMDAGetInfo(ex->da,&dim,0,0,0,0,0,0,&dof,&sw[0],&px,&py,&pz,&st);
837: DMDAGetCorners(ex->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
838: iupper[0] += ilower[0] - 1;
839: iupper[1] += ilower[1] - 1;
840: iupper[2] += ilower[2] - 1;
841: /* the hypre_Box is used to zero out the matrix entries in MatZeroValues() */
842: ex->hbox.imin[0] = ilower[0];
843: ex->hbox.imin[1] = ilower[1];
844: ex->hbox.imin[2] = ilower[2];
845: ex->hbox.imax[0] = iupper[0];
846: ex->hbox.imax[1] = iupper[1];
847: ex->hbox.imax[2] = iupper[2];
849: ex->dofs_order = 0;
851: /* assuming that the same number of dofs on each gridpoint. Also assume all cell-centred based */
852: ex->nvars= dof;
854: /* create the hypre grid object and set its information */
855: if (px || py || pz) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add periodic support by calling HYPRE_SStructGridSetPeriodic()");
856: PetscStackCallStandard(HYPRE_SStructGridCreate,(ex->hcomm,dim,nparts,&ex->ss_grid));
858: PetscStackCallStandard(HYPRE_SStructGridSetExtents,(ex->ss_grid,part,ex->hbox.imin,ex->hbox.imax));
860: {
861: HYPRE_SStructVariable *vartypes;
862: PetscMalloc(ex->nvars*sizeof(HYPRE_SStructVariable),&vartypes);
863: for (i= 0; i< ex->nvars; i++) vartypes[i]= HYPRE_SSTRUCT_VARIABLE_CELL;
864: PetscStackCallStandard(HYPRE_SStructGridSetVariables,(ex->ss_grid, part, ex->nvars,vartypes));
865: PetscFree(vartypes);
866: }
867: PetscStackCallStandard(HYPRE_SStructGridAssemble,(ex->ss_grid));
869: sw[1] = sw[0];
870: sw[2] = sw[1];
871: /* PetscStackCallStandard(HYPRE_SStructGridSetNumGhost,(ex->ss_grid,sw)); */
873: /* create the hypre stencil object and set its information */
874: if (sw[0] > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for wider stencils");
875: if (st == DMDA_STENCIL_BOX) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Ask us to add support for box stencils");
877: if (dim == 1) {
878: PetscInt offsets[3][1] = {{-1},{0},{1}};
879: PetscInt j, cnt;
881: ssize = 3*(ex->nvars);
882: PetscStackCallStandard(HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
883: cnt= 0;
884: for (i = 0; i < (ex->nvars); i++) {
885: for (j = 0; j < 3; j++) {
886: PetscStackCallStandard(HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, offsets[j], i));
887: cnt++;
888: }
889: }
890: } else if (dim == 2) {
891: PetscInt offsets[5][2] = {{0,-1},{-1,0},{0,0},{1,0},{0,1}};
892: PetscInt j, cnt;
894: ssize = 5*(ex->nvars);
895: PetscStackCallStandard(HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
896: cnt= 0;
897: for (i= 0; i< (ex->nvars); i++) {
898: for (j= 0; j< 5; j++) {
899: PetscStackCallStandard(HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, offsets[j], i));
900: cnt++;
901: }
902: }
903: } else if (dim == 3) {
904: PetscInt offsets[7][3] = {{0,0,-1},{0,-1,0},{-1,0,0},{0,0,0},{1,0,0},{0,1,0},{0,0,1}};
905: PetscInt j, cnt;
907: ssize = 7*(ex->nvars);
908: PetscStackCallStandard(HYPRE_SStructStencilCreate,(dim,ssize,&ex->ss_stencil));
909: cnt= 0;
910: for (i= 0; i< (ex->nvars); i++) {
911: for (j= 0; j< 7; j++) {
912: PetscStackCallStandard(HYPRE_SStructStencilSetEntry,(ex->ss_stencil, cnt, offsets[j], i));
913: cnt++;
914: }
915: }
916: }
918: /* create the HYPRE graph */
919: PetscStackCallStandard(HYPRE_SStructGraphCreate,(ex->hcomm, ex->ss_grid, &(ex->ss_graph)));
921: /* set the stencil graph. Note that each variable has the same graph. This means that each
922: variable couples to all the other variable and with the same stencil pattern. */
923: for (i= 0; i< (ex->nvars); i++) {
924: PetscStackCallStandard(HYPRE_SStructGraphSetStencil,(ex->ss_graph,part,i,ex->ss_stencil));
925: }
926: PetscStackCallStandard(HYPRE_SStructGraphAssemble,(ex->ss_graph));
928: /* create the HYPRE sstruct vectors for rhs and solution */
929: PetscStackCallStandard(HYPRE_SStructVectorCreate,(ex->hcomm,ex->ss_grid,&ex->ss_b));
930: PetscStackCallStandard(HYPRE_SStructVectorCreate,(ex->hcomm,ex->ss_grid,&ex->ss_x));
931: PetscStackCallStandard(HYPRE_SStructVectorInitialize,(ex->ss_b));
932: PetscStackCallStandard(HYPRE_SStructVectorInitialize,(ex->ss_x));
933: PetscStackCallStandard(HYPRE_SStructVectorAssemble,(ex->ss_b));
934: PetscStackCallStandard(HYPRE_SStructVectorAssemble,(ex->ss_x));
936: /* create the hypre matrix object and set its information */
937: PetscStackCallStandard(HYPRE_SStructMatrixCreate,(ex->hcomm,ex->ss_graph,&ex->ss_mat));
938: PetscStackCallStandard(HYPRE_SStructGridDestroy,(ex->ss_grid));
939: PetscStackCallStandard(HYPRE_SStructStencilDestroy,(ex->ss_stencil));
940: if (ex->needsinitialization) {
941: PetscStackCallStandard(HYPRE_SStructMatrixInitialize,(ex->ss_mat));
942: ex->needsinitialization = PETSC_FALSE;
943: }
945: /* set the global and local sizes of the matrix */
946: DMDAGetCorners(da,0,0,0,&nx,&ny,&nz);
947: MatSetSizes(mat,dof*nx*ny*nz,dof*nx*ny*nz,PETSC_DECIDE,PETSC_DECIDE);
948: PetscLayoutSetBlockSize(mat->rmap,1);
949: PetscLayoutSetBlockSize(mat->cmap,1);
950: PetscLayoutSetUp(mat->rmap);
951: PetscLayoutSetUp(mat->cmap);
953: if (dim == 3) {
954: mat->ops->setvalueslocal = MatSetValuesLocal_HYPRESStruct_3d;
955: mat->ops->zerorowslocal = MatZeroRowsLocal_HYPRESStruct_3d;
956: mat->ops->zeroentries = MatZeroEntries_HYPRESStruct_3d;
958: MatZeroEntries_HYPRESStruct_3d(mat);
959: } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only support for 3d DMDA currently");
961: /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
962: MatGetOwnershipRange(mat,&ex->rstart,NULL);
963: DMDAGetGlobalIndices(ex->da,NULL,&ex->gindices);
964: DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,&ex->gnxgnygnz);
966: ex->gnxgny *= ex->gnx;
967: ex->gnxgnygnz *= ex->gnxgny;
969: DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,&ex->nz);
971: ex->nxny = ex->nx*ex->ny;
972: ex->nxnynz = ex->nz*ex->nxny;
973: return(0);
974: }
978: PetscErrorCode MatMult_HYPRESStruct(Mat A,Vec x,Vec y)
979: {
980: PetscErrorCode ierr;
981: PetscScalar *xx,*yy;
982: PetscInt ilower[3],iupper[3];
983: Mat_HYPRESStruct *mx = (Mat_HYPRESStruct*)(A->data);
984: PetscInt ordering= mx->dofs_order;
985: PetscInt nvars = mx->nvars;
986: PetscInt part = 0;
987: PetscInt size;
988: PetscInt i;
991: DMDAGetCorners(mx->da,&ilower[0],&ilower[1],&ilower[2],&iupper[0],&iupper[1],&iupper[2]);
992: iupper[0] += ilower[0] - 1;
993: iupper[1] += ilower[1] - 1;
994: iupper[2] += ilower[2] - 1;
996: size= 1;
997: for (i = 0; i < 3; i++) size *= (iupper[i]-ilower[i]+1);
999: /* copy x values over to hypre for variable ordering */
1000: if (ordering) {
1001: PetscStackCallStandard(HYPRE_SStructVectorSetConstantValues,(mx->ss_b,0.0));
1002: VecGetArray(x,&xx);
1003: for (i= 0; i< nvars; i++) {
1004: PetscStackCallStandard(HYPRE_SStructVectorSetBoxValues,(mx->ss_b,part,ilower,iupper,i,xx+(size*i)));
1005: }
1006: VecRestoreArray(x,&xx);
1007: PetscStackCallStandard(HYPRE_SStructVectorAssemble,(mx->ss_b));
1008: PetscStackCallStandard(HYPRE_SStructMatrixMatvec,(1.0,mx->ss_mat,mx->ss_b,0.0,mx->ss_x));
1010: /* copy solution values back to PETSc */
1011: VecGetArray(y,&yy);
1012: for (i= 0; i< nvars; i++) {
1013: PetscStackCallStandard(HYPRE_SStructVectorGetBoxValues,(mx->ss_x,part,ilower,iupper,i,yy+(size*i)));
1014: }
1015: VecRestoreArray(y,&yy);
1016: } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
1017: PetscScalar *z;
1018: PetscInt j, k;
1020: PetscMalloc(nvars*size*sizeof(PetscScalar),&z);
1021: PetscStackCallStandard(HYPRE_SStructVectorSetConstantValues,(mx->ss_b,0.0));
1022: VecGetArray(x,&xx);
1024: /* transform nodal to hypre's variable ordering for sys_pfmg */
1025: for (i= 0; i< size; i++) {
1026: k= i*nvars;
1027: for (j= 0; j< nvars; j++) z[j*size+i]= xx[k+j];
1028: }
1029: for (i= 0; i< nvars; i++) {
1030: PetscStackCallStandard(HYPRE_SStructVectorSetBoxValues,(mx->ss_b,part,ilower,iupper,i,z+(size*i)));
1031: }
1032: VecRestoreArray(x,&xx);
1033: PetscStackCallStandard(HYPRE_SStructVectorAssemble,(mx->ss_b));
1034: PetscStackCallStandard(HYPRE_SStructMatrixMatvec,(1.0,mx->ss_mat,mx->ss_b,0.0,mx->ss_x));
1036: /* copy solution values back to PETSc */
1037: VecGetArray(y,&yy);
1038: for (i= 0; i< nvars; i++) {
1039: PetscStackCallStandard(HYPRE_SStructVectorGetBoxValues,(mx->ss_x,part,ilower,iupper,i,z+(size*i)));
1040: }
1041: /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
1042: for (i= 0; i< size; i++) {
1043: k= i*nvars;
1044: for (j= 0; j< nvars; j++) yy[k+j]= z[j*size+i];
1045: }
1046: VecRestoreArray(y,&yy);
1047: PetscFree(z);
1048: }
1049: return(0);
1050: }
1054: PetscErrorCode MatAssemblyEnd_HYPRESStruct(Mat mat,MatAssemblyType mode)
1055: {
1056: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
1057: PetscErrorCode ierr;
1060: PetscStackCallStandard(HYPRE_SStructMatrixAssemble,(ex->ss_mat));
1061: return(0);
1062: }
/*
   MatZeroEntries_HYPRESStruct - placeholder MatZeroEntries implementation.

   Intentionally a no-op: before a DMDA has been attached to the matrix there
   are no hypre entries to zero.  Once MatSetupDM has run in 3d, this slot is
   replaced by MatZeroEntries_HYPRESStruct_3d (see the dim == 3 branch above).
*/
PetscErrorCode MatZeroEntries_HYPRESStruct(Mat mat)
{
  /* before the DMDA is set to the matrix the zero doesn't need to do anything */
  return(0);
}
1076: PetscErrorCode MatDestroy_HYPRESStruct(Mat mat)
1077: {
1078: Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
1079: PetscErrorCode ierr;
1082: PetscStackCallStandard(HYPRE_SStructGraphDestroy,(ex->ss_graph));
1083: PetscStackCallStandard(HYPRE_SStructMatrixDestroy,(ex->ss_mat));
1084: PetscStackCallStandard(HYPRE_SStructVectorDestroy,(ex->ss_x));
1085: PetscStackCallStandard(HYPRE_SStructVectorDestroy,(ex->ss_b));
1086: return(0);
1087: }
1091: PETSC_EXTERN PetscErrorCode MatCreate_HYPRESStruct(Mat B)
1092: {
1093: Mat_HYPRESStruct *ex;
1094: PetscErrorCode ierr;
1097: PetscNewLog(B,Mat_HYPRESStruct,&ex);
1098: B->data = (void*)ex;
1099: B->rmap->bs = 1;
1100: B->assembled = PETSC_FALSE;
1102: B->insertmode = NOT_SET_VALUES;
1104: B->ops->assemblyend = MatAssemblyEnd_HYPRESStruct;
1105: B->ops->mult = MatMult_HYPRESStruct;
1106: B->ops->zeroentries = MatZeroEntries_HYPRESStruct;
1107: B->ops->destroy = MatDestroy_HYPRESStruct;
1109: ex->needsinitialization = PETSC_TRUE;
1111: MPI_Comm_dup(PetscObjectComm((PetscObject)B),&(ex->hcomm));
1112: PetscObjectComposeFunction((PetscObject)B,"MatSetupDM_C",MatSetupDM_HYPRESStruct);
1113: PetscObjectChangeTypeName((PetscObject)B,MATHYPRESSTRUCT);
1114: return(0);
1115: }