/*
    mhypre.c  (petsc-3.11.4, 2019-09-28)

    Creates hypre ijmatrix from PETSc matrix
*/

  6:  #include <petscmathypre.h>
  7:  #include <petsc/private/matimpl.h>
  8:  #include <../src/mat/impls/hypre/mhypre.h>
  9:  #include <../src/mat/impls/aij/mpi/mpiaij.h>
 10:  #include <../src/vec/vec/impls/hypre/vhyp.h>
 11: #include <HYPRE.h>
 12: #include <HYPRE_utilities.h>
 13: #include <_hypre_parcsr_ls.h>
 14: #include <_hypre_sstruct_ls.h>

 16: PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);

 18: static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
 19: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
 20: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
 21: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
 22: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,PetscScalar,Vec,PetscScalar,Vec,PetscBool);
 23: static PetscErrorCode hypre_array_destroy(void*);
 24: PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);

 26: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 27: {
 29:   PetscInt       i,n_d,n_o;
 30:   const PetscInt *ia_d,*ia_o;
 31:   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
 32:   PetscInt       *nnz_d=NULL,*nnz_o=NULL;

 35:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 36:     MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
 37:     if (done_d) {
 38:       PetscMalloc1(n_d,&nnz_d);
 39:       for (i=0; i<n_d; i++) {
 40:         nnz_d[i] = ia_d[i+1] - ia_d[i];
 41:       }
 42:     }
 43:     MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
 44:   }
 45:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 46:     MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 47:     if (done_o) {
 48:       PetscMalloc1(n_o,&nnz_o);
 49:       for (i=0; i<n_o; i++) {
 50:         nnz_o[i] = ia_o[i+1] - ia_o[i];
 51:       }
 52:     }
 53:     MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 54:   }
 55:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 56:     if (!done_o) { /* only diagonal part */
 57:       PetscMalloc1(n_d,&nnz_o);
 58:       for (i=0; i<n_d; i++) {
 59:         nnz_o[i] = 0;
 60:       }
 61:     }
 62:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
 63:     PetscFree(nnz_d);
 64:     PetscFree(nnz_o);
 65:   }
 66:   return(0);
 67: }

/*
    Creates the hypre IJ matrix hA->ij with the parallel layout of A and,
    when the type of A is recognized (MPIAIJ, MPIBAIJ, SeqAIJ, SeqBAIJ),
    preallocates it from the nonzero pattern of A.  No values are copied.
*/
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscInt       rstart,rend,cstart,cend;

  /* make sure the row/column layouts are finalized before reading them */
  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  /* hypre takes inclusive index ranges, hence the -1 on the upper bounds */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  {
    PetscBool      same;
    Mat            A_d,A_o;
    const PetscInt *colmap;
    PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);
    if (same) {
      MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
      MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
      return(0);
    }
    PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
    if (same) {
      MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
      MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
      return(0);
    }
    PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);
    if (same) {
      /* sequential: the whole matrix is the diagonal block */
      MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
      return(0);
    }
    PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
    if (same) {
      MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
      return(0);
    }
  }
  /* unrecognized type: no preallocation information available */
  return(0);
}

/*
    Copies the values of the PETSc matrix A into the (already created)
    hypre IJ matrix ij.  Fast memcpy-based paths are taken for SeqAIJ and
    for square MPIAIJ; anything else goes row by row through
    MatGetRow()/HYPRE_IJMatrixSetValues().
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
  MatGetSize(A,&nr,&nc);
  /* the MPIAIJ fast path shifts diagonal-block columns by the ownership
     start, which is only valid for square matrices laid out the usual way */
  if (flg && nr == nc) {
    MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
    return(0);
  }
  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
  if (flg) {
    MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
    return(0);
  }

  /* generic fallback: insert one row at a time */
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  MatGetOwnershipRange(A,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatGetRow(A,i,&ncols,&cols,&values);
    if (ncols) {
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
    }
    MatRestoreRow(A,i,&ncols,&cols,&values);
  }
  return(0);
}

/*
    Copies a SeqAIJ matrix into the hypre IJ matrix by memcpy-ing the CSR
    arrays (i, j, data) straight into hypre's diagonal CSR block, bypassing
    HYPRE_IJMatrixSetValues().
    NOTE(review): the memcpys assume sizeof(PetscInt) == sizeof(HYPRE_Int)
    and that PetscScalar matches hypre's data type — confirm in the build
    configuration.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
  PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
  PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));

  /* the matrix is fully formed: tell hypre no auxiliary assembly data is needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  return(0);
}

/*
    Copies a square MPIAIJ matrix into the hypre IJ matrix by writing the
    diagonal and off-diagonal CSR blocks directly into hypre's ParCSR
    storage.  Column indices are translated back to global numbering
    (diagonal block: shifted by the ownership start; off-diagonal block:
    mapped through garray) because that is what hypre stores.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;

  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  MatGetOwnershipRange(A,&cstart,NULL);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  jj  = (PetscInt*)hdiag->j;
  pjj = (PetscInt*)pdiag->j;
  for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
  PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
  PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
  jj  = (PetscInt*) hoffd->j;
  pjj = (PetscInt*) poffd->j;
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
  PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));

  /* the matrix is fully formed: tell hypre no auxiliary assembly data is needed */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  return(0);
}

/*
    Converts a MATHYPRE matrix A into a MATIS matrix *B.
    The local matrix of the MATIS object is built by merging hypre's
    diagonal and off-diagonal CSR blocks into a single SeqAIJ CSR whose
    local column space is [dc diagonal columns | oc off-diagonal columns].
    Handles MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* sizes, first global row/column indices, and CSR arrays of both blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total local nonzeros */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ISCreateStride(comm,dr,str,1,&is);
    ISLocalToGlobalMappingCreateIS(is,&rl2g);
    ISDestroy(&is);
    /* local columns: dc owned columns first, then the ghost columns */
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscMalloc1(dc+oc,&aux);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
    ISLocalToGlobalMappingCreateIS(is,&cl2g);
    ISDestroy(&is);
    /* create MATIS object */
    MatCreate(comm,B);
    MatSetSizes(*B,dr,dc,M,N);
    MatSetType(*B,MATIS);
    MatSetLocalToGlobalMapping(*B,rl2g,cl2g);
    ISLocalToGlobalMappingDestroy(&rl2g);
    ISLocalToGlobalMappingDestroy(&cl2g);

    /* allocate CSR for local matrix */
    PetscMalloc1(dr+1,&iptr);
    PetscMalloc1(nnz,&jptr);
    PetscMalloc1(nnz,&data);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse the CSR arrays of the existing local matrix */
    MatISGetLocalMat(*B,&lA);
    MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    MatSeqAIJGetArray(lA,&data);
  }
  /* merge local matrices: walk diag and offd row by row into one combined
     CSR; offd columns are shifted by dc into the local column numbering */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    /* keep each merged row sorted by column index */
    PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);
  }
  for (; cum<dr; cum++) *(++ii) = nnz; /* close any trailing empty rows */
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);
    MatISSetLocalMat(*B,lA);
    /* hack SeqAIJ: make it own the arrays allocated above */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    MatDestroy(&lA);
  }
  MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,B);
  }
  return(0);
}

/*
    Converts an AIJ-type matrix A into a MATHYPRE matrix *B by creating a
    preallocated hypre IJ matrix with the layout of A and then copying the
    values.  Handles MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and
    MAT_INPLACE_MATRIX.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);

  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    MatCreate(comm,&M);
    MatSetType(M,MATHYPRE);
    MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
    hB   = (Mat_HYPRE*)(M->data);
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  MatHYPRE_CreateFromMat(A,hB);    /* create and preallocate the IJ matrix */
  MatHYPRE_IJMatrixCopy(A,hB->ij); /* copy the values of A into it */
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&M);        /* A itself becomes the MATHYPRE matrix */
  }
  (*B)->preallocated = PETSC_TRUE;
  MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
  return(0);
}

/*
    Converts a MATHYPRE matrix A to AIJ (MPIAIJ when the communicator has
    more than one process, SeqAIJ otherwise).  The hypre diagonal and
    off-diagonal CSR blocks are copied — or, for MAT_INPLACE_MATRIX, stolen
    in place — into PETSc CSR arrays; off-diagonal column indices are
    mapped back to global numbering and every row is sorted by column.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);
    PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  MPI_Comm_size(comm,&size);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain target arrays for the diagonal block */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscMalloc1(m+1,&dii);
    PetscMalloc1(dnnz,&djj);
    PetscMalloc1(dnnz,&da);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      MatSeqAIJGetArray(b->A,&da);
    } else {
      MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      MatSeqAIJGetArray(*B,&da);
    }
  } else { /* MAT_INPLACE_MATRIX */
    /* steal the hypre arrays in place.  NOTE(review): the memcpys below
       then copy each buffer onto itself — harmless in practice, but worth
       confirming */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
  PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
  PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
  /* sort each row of the diagonal block by column index */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    PetscSortIntWithScalarArray(nc,iptr,aptr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    if (reuse == MAT_INITIAL_MATRIX) {
      PetscMalloc1(m+1,&oii);
      PetscMalloc1(onnz,&ojj);
      PetscMalloc1(onnz,&oa);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      MatSeqAIJGetArray(b->B,&oa);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
    /* translate hypre's compressed off-diagonal column indices back to
       global column numbers through the column map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
    /* sort each row of the off-diagonal block by column index */
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       PetscSortIntWithScalarArray(nc,iptr,aptr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);
      /* hack MPIAIJ: make both local blocks own the arrays allocated above */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);
      /* the stolen arrays now belong to T: clear hypre's pointers so they
         are not freed when the hypre object is destroyed */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      MatHeaderReplace(A,&T);
    }
  } else {
    /* sequential: no off-diagonal block */
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);
      /* hack SeqAIJ: make it own the arrays allocated above */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      MatHeaderReplace(A,&T);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    /* attach the stolen arrays to *B so hypre_array_destroy releases them
       when *B is destroyed */
    for (i=0; i<6; i++) {
      PetscContainer c;

      PetscContainerCreate(comm,&c);
      PetscContainerSetPointer(c,ptrs[i]);
      PetscContainerSetUserDestroy(c,hypre_array_destroy);
      PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);
      PetscContainerDestroy(&c);
    }
  }
  return(0);
}

/*
    Wraps an AIJ matrix (Seq or MPI) in a temporary hypre_ParCSRMatrix that
    aliases the PETSc CSR arrays — no values are copied.  The result must
    be handed back to MatAIJRestoreParCSR_Private() so the borrowed
    pointers are detached before the wrapper is destroyed.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscErrorCode     ierr;

  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
  PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag   = (Mat_SeqAIJ*)a->A->data;
    offd   = (Mat_SeqAIJ*)a->B->data;
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    /* sequential: the whole matrix is the diagonal block */
    diag   = (Mat_SeqAIJ*)A->data;
    offd   = NULL;
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* with hypre's assumed partition only this process's offsets are passed */
    MPI_Comm_rank(comm,&myid);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  /* NOTE(review): the casts assume sizeof(PetscInt) == sizeof(HYPRE_Int) —
     confirm in the build configuration */
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
  /* the starts arrays belong to the PETSc layouts; hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
  hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
  hypre_CSRMatrixData(hdiag)        = diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
    hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
    hypre_CSRMatrixData(hoffd)        = offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
    hypre_ParCSRMatrixSetNumNonzeros(tA);
    hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
  }
  *hA = tA;
  return(0);
}

600: static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
601: {
602:   hypre_CSRMatrix    *hdiag,*hoffd;

605:   hdiag = hypre_ParCSRMatrixDiag(*hA);
606:   hoffd = hypre_ParCSRMatrixOffd(*hA);
607:   /* set pointers to NULL before destroying tA */
608:   hypre_CSRMatrixI(hdiag)           = NULL;
609:   hypre_CSRMatrixJ(hdiag)           = NULL;
610:   hypre_CSRMatrixData(hdiag)        = NULL;
611:   hypre_CSRMatrixI(hoffd)           = NULL;
612:   hypre_CSRMatrixJ(hoffd)           = NULL;
613:   hypre_CSRMatrixData(hoffd)        = NULL;
614:   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
615:   hypre_ParCSRMatrixDestroy(*hA);
616:   *hA = NULL;
617:   return(0);
618: }

620: /* calls RAP from BoomerAMG:
621:    the resulting ParCSR will not own the column and row starts
622:    It looks like we don't need to have the diagonal entries
623:    ordered first in the rows of the diagonal part
624:    for boomerAMGBuildCoarseOperator to work */
625: static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
626: {
627:   HYPRE_Int P_owns_col_starts,R_owns_row_starts;

630:   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
631:   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
632:   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
633:   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
634:   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
635:   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
636:   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
637:   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
638:   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
639:   return(0);
640: }

642: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
643: {
644:   Mat                B;
645:   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
646:   PetscErrorCode     ierr;

649:   MatAIJGetParCSR_Private(A,&hA);
650:   MatAIJGetParCSR_Private(P,&hP);
651:   MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);
652:   MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);
653:   MatHeaderMerge(C,&B);
654:   MatAIJRestoreParCSR_Private(A,&hA);
655:   MatAIJRestoreParCSR_Private(P,&hP);
656:   return(0);
657: }

659: PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
660: {

664:   MatCreate(PetscObjectComm((PetscObject)A),C);
665:   MatSetType(*C,MATAIJ);
666:   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
667:   return(0);
668: }

/*
    Numeric PtAP with AIJ A and MATHYPRE P: wraps A in a temporary ParCSR,
    calls BoomerAMG's RAP kernel and merges the product into C, keeping C's
    type.
*/
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* wrap A (no copy), run RAP, release the wrapper */
  MatAIJGetParCSR_Private(A,&hA);
  MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);
  MatAIJRestoreParCSR_Private(A,&hA);

  /* create temporary matrix and merge to C */
  MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);
  MatHeaderMerge(C,&B);
  return(0);
}

/*
    PtAP driver for AIJ A with MATHYPRE P.  On the first call
    (MAT_INITIAL_MATRIX) it creates C — whose type can be overridden at
    runtime with -matptap_hypre_outtype — and installs the numeric kernel;
    the numeric phase then runs on every call.
*/
static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{

  if (scall == MAT_INITIAL_MATRIX) {
    const char *deft = MATAIJ;
    char       type[256];
    PetscBool  flg;

    PetscObjectOptionsBegin((PetscObject)A);
    PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);
    PetscOptionsEnd();
    PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
    MatCreate(PetscObjectComm((PetscObject)A),C);
    if (flg) {
      MatSetType(*C,type);  /* user-selected output type */
    } else {
      MatSetType(*C,deft);
    }
    (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
    PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
  }
  PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
  (*(*C)->ops->ptapnumeric)(A,P,*C);
  PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
  return(0);
}

/*
    Numeric PtAP with both A and P of type MATHYPRE: extracts the ParCSR
    objects, calls BoomerAMG's RAP kernel and merges the MATHYPRE product
    into C.
*/
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  /* both operands must be MATHYPRE with a ParCSR backing object */
  PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);
  MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);
  MatHeaderMerge(C,&B);
  return(0);
}

755: static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
756: {

760:   if (scall == MAT_INITIAL_MATRIX) {
761:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
762:     MatCreate(PetscObjectComm((PetscObject)A),C);
763:     MatSetType(*C,MATHYPRE);
764:     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
765:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
766:   }
767:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
768:   (*(*C)->ops->ptapnumeric)(A,P,*C);
769:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
770:   return(0);
771: }

/* calls hypre_ParMatmul
   hypre_ParMatmul uses hypre_ParCSRMatrixCreate with the communicator of hA
   hypre_ParCSRMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries
   ordered first in the rows of the diagonal part
   for boomerAMGBuildCoarseOperator to work */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  /* push/pop so an error inside hypre is attributed to hypre_ParMatmul */
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
  PetscStackPop;
  return(0);
}

788: /* Numeric phase of C = A*B for AIJ matrices using hypre: wrap A and B as
     ParCSR views, multiply in hypre, convert the product back to AIJ and merge
     it into C. hAB ownership passes to D via PETSC_OWN_POINTER. */
788: static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
789: {
790:   Mat                D;
791:   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
792:   PetscErrorCode     ierr;

795:   MatAIJGetParCSR_Private(A,&hA);
796:   MatAIJGetParCSR_Private(B,&hB);
797:   MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);
798:   MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);
799:   /* keep C's header (so callers' references stay valid) but take D's data */
799:   MatHeaderMerge(C,&D);
800:   MatAIJRestoreParCSR_Private(A,&hA);
801:   MatAIJRestoreParCSR_Private(B,&hB);
802:   return(0);
803: }

805: /* Symbolic phase of C = A*B via hypre: no symbolic analysis is done here
     (hypre does its own); we only create an empty AIJ matrix and install the
     numeric routine. The fill argument is unused for the same reason. */
805: PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
806: {

810:   MatCreate(PetscObjectComm((PetscObject)A),C);
811:   MatSetType(*C,MATAIJ);
812:   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
813:   return(0);
814: }

816: /* Numeric phase of C = A*B when both operands are MATHYPRE. Validates types,
     extracts the underlying ParCSR objects, multiplies in hypre, and replaces
     C's content with the product (MATHYPRE, owning the new ParCSR). */
816: static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
817: {
818:   Mat                D;
819:   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
820:   Mat_HYPRE          *hA,*hB;
821:   PetscBool          ishypre;
822:   HYPRE_Int          type;
823:   PetscErrorCode     ierr;

826:   PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);
827:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
828:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
829:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
830:   hA = (Mat_HYPRE*)A->data;
831:   hB = (Mat_HYPRE*)B->data;
832:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
833:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
834:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
835:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
836:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
837:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
838:   MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);
839:   MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);
840:   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
841:   MatHeaderReplace(C,&D);
842:   /* HeaderReplace wiped C's ops table; reinstall the numeric routine for reuse */
842:   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
843:   return(0);
844: }

846: /* MatMult driver for two MATHYPRE matrices: mirrors MatPtAP_HYPRE_HYPRE —
     on MAT_INITIAL_MATRIX create C and install the numeric op, then always
     run the numeric phase. fill is unused (hypre handles symbolic work). */
846: static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
847: {

851:   if (scall == MAT_INITIAL_MATRIX) {
852:     PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
853:     MatCreate(PetscObjectComm((PetscObject)A),C);
854:     MatSetType(*C,MATHYPRE);
855:     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
856:     PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
857:   }
858:   PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
859:   (*(*C)->ops->matmultnumeric)(A,B,*C);
860:   PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
861:   return(0);
862: }

864: /* Numeric phase of D = A^T*B*C for AIJ matrices via hypre's RAP kernel:
     wrap the three operands as ParCSR views, compute the triple product,
     convert the result to AIJ and merge it into D's header. */
864: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
865: {
866:   Mat                E;
867:   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
868:   PetscErrorCode     ierr;

871:   MatAIJGetParCSR_Private(A,&hA);
872:   MatAIJGetParCSR_Private(B,&hB);
873:   MatAIJGetParCSR_Private(C,&hC);
874:   /* RAP with R = A (applied transposed), A = B, P = C -> hABC = A^T*B*C */
874:   MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);
875:   MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);
876:   MatHeaderMerge(D,&E);
877:   MatAIJRestoreParCSR_Private(A,&hA);
878:   MatAIJRestoreParCSR_Private(B,&hB);
879:   MatAIJRestoreParCSR_Private(C,&hC);
880:   return(0);
881: }

883: /* Symbolic phase of D = A^T*B*C via hypre: only creates the empty AIJ result;
     all real work happens in the numeric routine. fill is unused. */
883: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
884: {

888:   MatCreate(PetscObjectComm((PetscObject)A),D);
889:   MatSetType(*D,MATAIJ);
890:   return(0);
891: }

893: /* y = A^T * x : thin wrapper over the shared mult kernel (a=1, b=0, trans). */
893: static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
894: {

898:   MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);
899:   return(0);
900: }

902: /* y = A * x : thin wrapper over the shared mult kernel (a=1, b=0, no trans). */
902: static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
903: {

907:   MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);
908:   return(0);
909: }

911: /* z = y + A * x : copy y into z when they differ, then accumulate with b=1. */
911: static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
912: {

916:   if (y != z) {
917:     VecCopy(y,z);
918:   }
919:   MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);
920:   return(0);
921: }

923: /* z = y + A^T * x : copy y into z when they differ, then accumulate transposed. */
923: static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
924: {

928:   if (y != z) {
929:     VecCopy(y,z);
930:   }
931:   MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);
932:   return(0);
933: }

935: /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
936: /* Shared matvec kernel: the MATHYPRE object keeps two work IJVectors (hA->x
     for the column space, hA->b for the row space). Rather than copying data,
     the PETSc vector arrays are temporarily swapped into the hypre vectors
     (VecHYPRE_ParVectorReplacePointer saves the old pointer in sax/say) and
     restored after the hypre matvec. In the transpose case the roles of the
     two work vectors are exchanged, since A^T maps row space to column space. */
936: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, PetscScalar a, Vec x, PetscScalar b, Vec y, PetscBool trans)
937: {
938:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
939:   hypre_ParCSRMatrix *parcsr;
940:   hypre_ParVector    *hx,*hy;
941:   PetscScalar        *ax,*ay,*sax,*say;
942:   PetscErrorCode     ierr;

945:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
946:   PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
947:   PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
948:   /* cast away const: the array is only read while swapped in (trans path reads hy) */
948:   VecGetArrayRead(x,(const PetscScalar**)&ax);
949:   VecGetArray(y,&ay);
950:   if (trans) {
951:     VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
952:     VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
953:     hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
954:     VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
955:     VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
956:   } else {
957:     VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
958:     VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
959:     hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
960:     VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
961:     VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
962:   }
963:   VecRestoreArrayRead(x,(const PetscScalar**)&ax);
964:   VecRestoreArray(y,&ay);
965:   return(0);
966: }

968: /* Destroys a MATHYPRE matrix: frees the work IJVectors, the IJMatrix (without
     its inner ParCSR when inner_free is false, i.e. the ParCSR is not ours),
     the duplicated communicator, the stash, the scratch array, and clears all
     composed methods before releasing the type-specific data. */
968: static PetscErrorCode MatDestroy_HYPRE(Mat A)
969: {
970:   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;

974:   if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
975:   if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
976:   if (hA->ij) {
977:     /* detach the ParCSR so IJMatrixDestroy does not free memory we do not own */
977:     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
978:     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
979:   }
980:   if (hA->comm) { MPI_Comm_free(&hA->comm); }

982:   MatStashDestroy_Private(&A->stash);

984:   PetscFree(hA->array);

986:   PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);
987:   PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);
988:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);
989:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);
990:   PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);
991:   PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);
992:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);
993:   PetscFree(A->data);
994:   return(0);
995: }

997: /* Default setup: preallocate with default per-row nonzero estimates. */
997: static PetscErrorCode MatSetUp_HYPRE(Mat A)
998: {

1002:   MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);
1003:   return(0);
1004: }

1006: /* Finishes assembly: drains the stash of off-process entries (inserting each
      run of same-row values with one MatSetValues_HYPRE call), assembles the
      IJMatrix, rebuilds hypre's auxiliary translator matrix so further
      SetValues calls remain possible, and lazily creates the two work
      IJVectors used by the matvec kernel. MAT_FLUSH_ASSEMBLY is unsupported. */
1006: static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1007: {
1008:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1009:   Vec                x,b;
1010:   PetscMPIInt        n;
1011:   PetscInt           i,j,rstart,ncols,flg;
1012:   PetscInt           *row,*col;
1013:   PetscScalar        *val;
1014:   PetscErrorCode     ierr;

1017:   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

1019:   if (!A->nooffprocentries) {
1020:     while (1) {
1021:       MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);
1022:       if (!flg) break;

1024:       for (i=0; i<n; ) {
1025:         /* Now identify the consecutive vals belonging to the same row */
1026:         for (j=i,rstart=row[j]; j<n; j++) {
1027:           if (row[j] != rstart) break;
1028:         }
1029:         if (j < n) ncols = j-i;
1030:         else       ncols = n-i;
1031:         /* Now assemble all these values with a single function call */
1032:         MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);

1034:         i = j;
1035:       }
1036:     }
1037:     MatStashScatterEnd_Private(&A->stash);
1038:   }

1040:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
1041:   /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
1042:   {
1043:     hypre_AuxParCSRMatrix *aux_matrix;

1045:     /* call destroy just to make sure we do not leak anything */
1046:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1047:     PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
1048:     hypre_IJMatrixTranslator(hA->ij) = NULL;

1050:     /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1051:     PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1052:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1053:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1054:     PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
1055:   }
1056:   /* work vectors already created on a previous assembly -> done */
1056:   if (hA->x) return(0);
1057:   PetscLayoutSetUp(A->rmap);
1058:   PetscLayoutSetUp(A->cmap);
1059:   /* arrays are NULL: hA->x/hA->b only provide hypre-side metadata; data pointers
      are swapped in per-call by MatHYPRE_MultKernel_Private */
1059:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);
1060:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);
1061:   VecHYPRE_IJVectorCreate(x,&hA->x);
1062:   VecHYPRE_IJVectorCreate(b,&hA->b);
1063:   VecDestroy(&x);
1064:   VecDestroy(&b);
1065:   return(0);
1066: }

1068: /* Hands out the single per-matrix scratch buffer (at least `size` bytes,
      grown on demand and cached in hA->array). Non-reentrant by design: a
      second request before MatRestoreArray_HYPRE is an error. */
1068: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1069: {
1070:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1071:   PetscErrorCode     ierr;

1074:   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

1076:   if (hA->size >= size) *array = hA->array;
1077:   else {
1078:     PetscFree(hA->array);
1079:     hA->size = size;
1080:     /* size is in bytes (PetscMalloc, not PetscMalloc1) */
1080:     PetscMalloc(hA->size,&hA->array);
1081:     *array = hA->array;
1082:   }

1084:   hA->available = PETSC_FALSE;
1085:   return(0);
1086: }

1088: /* Returns the scratch buffer obtained with MatGetArray_HYPRE; the memory
      itself stays cached in hA->array for reuse. */
1088: static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1089: {
1090:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;

1093:   *array = NULL;
1094:   hA->available = PETSC_TRUE;
1095:   return(0);
1096: }

1098: /* Inserts/adds an nr x nc logically dense block of values. Negative column
      indices are compacted out first (cscr[0] holds the kept columns, cscr[1]
      the original position of each kept column so the right entries of each
      row of v can be gathered into sscr); negative row indices are skipped.
      ADD_VALUES goes straight to hypre; INSERT_VALUES stashes off-process rows
      for communication during assembly. Scratch space comes from the matrix's
      single cached buffer (MatGetArray_HYPRE).
      NOTE(review): the (HYPRE_Int*) casts of PetscInt arrays assume the two
      integer types have the same size in this build — verify configure-time
      guarantees. */
1098: PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1099: {
1100:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1101:   PetscScalar        *vals = (PetscScalar *)v;
1102:   PetscScalar        *sscr;
1103:   PetscInt           *cscr[2];
1104:   PetscInt           i,nzc;
1105:   void               *array = NULL;
1106:   PetscErrorCode     ierr;

1109:   MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);
1110:   cscr[0] = (PetscInt*)array;
1111:   cscr[1] = ((PetscInt*)array)+nc;
1112:   sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
1113:   for (i=0,nzc=0;i<nc;i++) {
1114:     if (cols[i] >= 0) {
1115:       cscr[0][nzc  ] = cols[i];
1116:       cscr[1][nzc++] = i;
1117:     }
1118:   }
1119:   /* all columns negative: nothing to insert */
1119:   if (!nzc) {
1120:     MatRestoreArray_HYPRE(A,&array);
1121:     return(0);
1122:   }

1124:   if (ins == ADD_VALUES) {
1125:     for (i=0;i<nr;i++) {
1126:       if (rows[i] >= 0 && nzc) {
1127:         PetscInt j;
1128:         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1129:         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
1130:       }
1131:       vals += nc;
1132:     }
1133:   } else { /* INSERT_VALUES */

1135:     PetscInt rst,ren;
1136:     MatGetOwnershipRange(A,&rst,&ren);

1138:     for (i=0;i<nr;i++) {
1139:       if (rows[i] >= 0 && nzc) {
1140:         PetscInt j;
1141:         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1142:         /* nonlocal values */
1143:         if (rows[i] < rst || rows[i] >= ren) { MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr,PETSC_FALSE); }
1144:         /* local values */
1145:         else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
1146:       }
1147:       vals += nc;
1148:     }
1149:   }

1151:   MatRestoreArray_HYPRE(A,&array);
1152:   return(0);
1153: }

1155: /* Preallocation backend for MATHYPRE: creates (or validates) the underlying
      HYPRE_IJMatrix, sets per-row nonzero estimates (diag/offd split in
      parallel, plain row sizes sequentially), and (re)initializes the IJ
      matrix so values can be set.
      Fix: the SETERRQ4 format strings used "%d" for the final PetscInt
      argument (re/ce); PetscInt must be printed with %D or it is misformatted
      on 64-bit-index builds.
      NOTE(review): the (HYPRE_Int*)dnnz / (HYPRE_Int*)onnz casts assume
      HYPRE_Int and PetscInt have the same size in this build — verify
      configure-time guarantees. */
1155: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1156: {
1157:   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
1158:   HYPRE_Int      *hdnnz,*honnz;
1159:   PetscInt       i,rs,re,cs,ce,bs;
1160:   PetscMPIInt    size;

1164:   MatGetBlockSize(A,&bs);
1165:   PetscLayoutSetUp(A->rmap);
1166:   PetscLayoutSetUp(A->cmap);
1167:   rs   = A->rmap->rstart;
1168:   re   = A->rmap->rend;
1169:   cs   = A->cmap->rstart;
1170:   ce   = A->cmap->rend;
1171:   if (!hA->ij) {
1172:     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1173:     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1174:   } else {
1175:     HYPRE_Int hrs,hre,hcs,hce;
1176:     /* IJMatrix already exists (e.g. MatCreateFromParCSR): only check that its
       local layout matches the PETSc layout */
1176:     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1177:     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
1178:     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
1179:   }
1180:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1181:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

1183:   if (!dnnz) {
1184:     PetscMalloc1(A->rmap->n,&hdnnz);
1185:     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1186:   } else {
1187:     hdnnz = (HYPRE_Int*)dnnz;
1188:   }
1189:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1190:   if (size > 1) {
1191:     hypre_AuxParCSRMatrix *aux_matrix;
1192:     if (!onnz) {
1193:       PetscMalloc1(A->rmap->n,&honnz);
1194:       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1195:     } else {
1196:       honnz = (HYPRE_Int*)onnz;
1197:     }
1198:     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1199:        they assume the user will input the entire row values, properly sorted
1200:        In PETSc, we don't make such an assumption, and we instead set this flag to 1
1201:        Also, to avoid possible memory leaks, we destroy and recreate the translator
1202:        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1203:        the IJ matrix for us */
1204:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1205:     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1206:     hypre_IJMatrixTranslator(hA->ij) = NULL;
1207:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1208:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1209:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1210:   } else {
1211:     honnz = NULL;
1212:     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1213:   }

1215:   /* reset assembled flag and call the initialize method */
1216:   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1217:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1218:   if (!dnnz) {
1219:     PetscFree(hdnnz);
1220:   }
1221:   if (!onnz && honnz) {
1222:     PetscFree(honnz);
1223:   }

1225:   /* Match AIJ logic */
1226:   A->preallocated = PETSC_TRUE;
1227:   A->assembled    = PETSC_FALSE;
1228:   return(0);
1229: }

1231: /*@C
1232:    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1234:    Collective on Mat

1236:    Input Parameters:
1237: +  A - the matrix
1238: .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1239:           (same value is used for all local rows)
1240: .  dnnz - array containing the number of nonzeros in the various rows of the
1241:           DIAGONAL portion of the local submatrix (possibly different for each row)
1242:           or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1243:           The size of this array is equal to the number of local rows, i.e 'm'.
1244:           For matrices that will be factored, you must leave room for (and set)
1245:           the diagonal entry even if it is zero.
1246: .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1247:           submatrix (same value is used for all local rows).
1248: -  onnz - array containing the number of nonzeros in the various rows of the
1249:           OFF-DIAGONAL portion of the local submatrix (possibly different for
1250:           each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1251:           structure. The size of this array is equal to the number
1252:           of local rows, i.e 'm'.

1254:    Notes:
1255:     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

1257:    Level: intermediate

1259: .keywords: matrix, aij, compressed row, sparse, parallel

1261: .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1262: @*/
1263: /* Public entry point: dispatches to the type-specific implementation
      composed as "MatHYPRESetPreallocation_C" (no-op on other types). */
1263: PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1264: {

1270:   PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));
1271:   return(0);
1272: }

1274: /*
1275:    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

1277:    Collective

1279:    Input Parameters:
1280: +  parcsr   - the pointer to the hypre_ParCSRMatrix
1281: .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1282: -  copymode - PETSc copying options

1284:    Output Parameter:
1285: .  A  - the matrix

1287:    Level: intermediate

1289: .seealso: MatHYPRE, PetscCopyMode
1290: */
1291: /* Wraps an existing hypre_ParCSRMatrix in a PETSc Mat of type MATAIJ, MATIS
      or MATHYPRE. copymode controls ownership: PETSC_COPY_VALUES clones the
      ParCSR values (MATHYPRE only; the clone is then owned), PETSC_OWN_POINTER
      transfers ownership to the new Mat, PETSC_USE_POINTER leaves ownership
      with the caller. Local sizes are derived from the ParCSR's row/col
      partition, patched up for empty local blocks and converted from hypre's
      inclusive last-index convention to PETSc's exclusive end convention. */
1291: PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
1292: {
1293:   Mat                   T;
1294:   Mat_HYPRE             *hA;
1295:   MPI_Comm              comm;
1296:   PetscInt              rstart,rend,cstart,cend,M,N;
1297:   PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
1298:   PetscErrorCode        ierr;

1301:   comm   = hypre_ParCSRMatrixComm(parcsr);
1302:   PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);
1303:   PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);
1304:   PetscStrcmp(mtype,MATAIJ,&isaij);
1305:   PetscStrcmp(mtype,MATHYPRE,&ishyp);
1306:   PetscStrcmp(mtype,MATIS,&isis);
1307:   isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
1308:   if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
1309:   /* access ParCSRMatrix */
1310:   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1311:   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1312:   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1313:   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1314:   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1315:   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

1317:   /* fix for empty local rows/columns */
1318:   if (rend < rstart) rend = rstart;
1319:   if (cend < cstart) cend = cstart;

1321:   /* PETSc convention */
1322:   rend++;
1323:   cend++;
1324:   rend = PetscMin(rend,M);
1325:   cend = PetscMin(cend,N);

1327:   /* create PETSc matrix with MatHYPRE */
1328:   MatCreate(comm,&T);
1329:   MatSetSizes(T,rend-rstart,cend-cstart,M,N);
1330:   MatSetType(T,MATHYPRE);
1331:   hA   = (Mat_HYPRE*)(T->data);

1333:   /* create HYPRE_IJMatrix */
1334:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
1335:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

1337:   /* create new ParCSR object if needed */
1338:   if (ishyp && copymode == PETSC_COPY_VALUES) {
1339:     hypre_ParCSRMatrix *new_parcsr;
1340:     hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

1342:     /* CompleteClone copies the structure; the values are copied explicitly below */
1342:     new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
1343:     hdiag      = hypre_ParCSRMatrixDiag(parcsr);
1344:     hoffd      = hypre_ParCSRMatrixOffd(parcsr);
1345:     ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
1346:     noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
1347:     PetscMemcpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag)*sizeof(PetscScalar));
1348:     PetscMemcpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd)*sizeof(PetscScalar));
1349:     parcsr     = new_parcsr;
1350:     copymode   = PETSC_OWN_POINTER;
1351:   }

1353:   /* set ParCSR object */
1354:   hypre_IJMatrixObject(hA->ij) = parcsr;
1355:   T->preallocated = PETSC_TRUE;

1357:   /* set assembled flag */
1358:   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1359:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1360:   if (ishyp) {
1361:     PetscMPIInt myid = 0;

1363:     /* make sure we always have row_starts and col_starts available */
1364:     if (HYPRE_AssumedPartitionCheck()) {
1365:       MPI_Comm_rank(comm,&myid);
1366:     }
1367:     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1368:       PetscLayout map;

1370:       /* point hypre at PETSc's layout range array; the ParCSR does not own it */
1370:       MatGetLayouts(T,NULL,&map);
1371:       PetscLayoutSetUp(map);
1372:       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
1373:     }
1374:     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1375:       PetscLayout map;

1377:       MatGetLayouts(T,&map,NULL);
1378:       PetscLayoutSetUp(map);
1379:       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
1380:     }
1381:     /* prevent from freeing the pointer */
1382:     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1383:     *A   = T;
1384:     MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
1385:     MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
1386:   } else if (isaij) {
1387:     if (copymode != PETSC_OWN_POINTER) {
1388:       /* prevent from freeing the pointer */
1389:       hA->inner_free = PETSC_FALSE;
1390:       MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);
1391:       MatDestroy(&T);
1392:     } else { /* AIJ return type with PETSC_OWN_POINTER */
1393:       MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);
1394:       *A   = T;
1395:     }
1396:   } else if (isis) {
1397:     MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);
1398:     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1399:     MatDestroy(&T);
1400:   }
1401:   return(0);
1402: }

1404: /* Type-specific backend: returns the inner hypre_ParCSRMatrix pointer
      (borrowed, no ownership transfer) after validating the IJ object type. */
1404: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1405: {
1406:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1407:   HYPRE_Int type;

1410:   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1411:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1412:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1413:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1414:   return(0);
1415: }

1417: /*
1418:    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1420:    Not collective

1422:    Input Parameters:
1423: +  A  - the MATHYPRE object

1425:    Output Parameter:
1426: .  parcsr  - the pointer to the hypre_ParCSRMatrix

1428:    Level: intermediate

1430: .seealso: MatHYPRE, PetscCopyMode
1431: */
1432: /* Public entry point: dispatches to "MatHYPREGetParCSR_C"; errors if the
      matrix type does not provide it (PetscUseMethod, unlike PetscTryMethod). */
1432: PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1433: {

1439:   PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));
1440:   return(0);
1441: }

1443: /* Scans the diagonal CSR block for a structurally missing diagonal entry.
      On the first missing row it reports *missing = PETSC_TRUE and the global
      row index in *dd, then returns. Requires a square diagonal block. */
1443: static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1444: {
1445:   hypre_ParCSRMatrix *parcsr;
1446:   hypre_CSRMatrix    *ha;
1447:   PetscInt           rst;
1448:   PetscErrorCode     ierr;

1451:   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1452:   MatGetOwnershipRange(A,&rst,NULL);
1453:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1454:   if (missing) *missing = PETSC_FALSE;
1455:   if (dd) *dd = -1;
1456:   ha = hypre_ParCSRMatrixDiag(parcsr);
1457:   if (ha) {
1458:     PetscInt  size,i;
1459:     HYPRE_Int *ii,*jj;

1461:     size = hypre_CSRMatrixNumRows(ha);
1462:     ii   = hypre_CSRMatrixI(ha);
1463:     jj   = hypre_CSRMatrixJ(ha);
1464:     for (i = 0; i < size; i++) {
1465:       PetscInt  j;
1466:       PetscBool found = PETSC_FALSE;

1468:       /* diag block uses local column numbering, so the diagonal entry of row i has column i */
1468:       for (j = ii[i]; j < ii[i+1] && !found; j++)
1469:         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

1471:       if (!found) {
1472:         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1473:         if (missing) *missing = PETSC_TRUE;
1474:         if (dd) *dd = i+rst;
1475:         return(0);
1476:       }
1477:     }
1478:     if (!size) {
1479:       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1480:       if (missing) *missing = PETSC_TRUE;
1481:       if (dd) *dd = rst;
1482:     }
1483:   } else {
1484:     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1485:     if (missing) *missing = PETSC_TRUE;
1486:     if (dd) *dd = rst;
1487:   }
1488:   return(0);
1489: }

1491: /* A *= s : scales every stored value in both the diagonal and off-diagonal
      CSR blocks in place (ii[size] is the total nonzero count of a block). */
1491: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1492: {
1493:   hypre_ParCSRMatrix *parcsr;
1494:   hypre_CSRMatrix    *ha;
1495:   PetscErrorCode     ierr;

1498:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1499:   /* diagonal part */
1500:   ha = hypre_ParCSRMatrixDiag(parcsr);
1501:   if (ha) {
1502:     PetscInt    size,i;
1503:     HYPRE_Int   *ii;
1504:     PetscScalar *a;

1506:     size = hypre_CSRMatrixNumRows(ha);
1507:     a    = hypre_CSRMatrixData(ha);
1508:     ii   = hypre_CSRMatrixI(ha);
1509:     for (i = 0; i < ii[size]; i++) a[i] *= s;
1510:   }
1511:   /* offdiagonal part */
1512:   ha = hypre_ParCSRMatrixOffd(parcsr);
1513:   if (ha) {
1514:     PetscInt    size,i;
1515:     HYPRE_Int   *ii;
1516:     PetscScalar *a;

1518:     size = hypre_CSRMatrixNumRows(ha);
1519:     a    = hypre_CSRMatrixData(ha);
1520:     ii   = hypre_CSRMatrixI(ha);
1521:     for (i = 0; i < ii[size]; i++) a[i] *= s;
1522:   }
1523:   return(0);
1524: }

1526: /* Zeros the given rows AND columns via hypre's EliminateRowsCols. Only local
      rows are accepted; the diag/x/b fix-up of the full MatZeroRowsColumns
      contract is not implemented (hence the error when x or b is given, and
      note that the `diag` argument is ignored by this path). */
1526: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1527: {
1528:   hypre_ParCSRMatrix *parcsr;
1529:   HYPRE_Int          *lrows;
1530:   PetscInt           rst,ren,i;
1531:   PetscErrorCode     ierr;

1534:   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1535:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1536:   PetscMalloc1(numRows,&lrows);
1537:   MatGetOwnershipRange(A,&rst,&ren);
1538:   for (i=0;i<numRows;i++) {
1539:     if (rows[i] < rst || rows[i] >= ren)
1540:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1541:     /* hypre wants process-local (0-based) row indices */
1541:     lrows[i] = rows[i] - rst;
1542:   }
1543:   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1544:   PetscFree(lrows);
1545:   return(0);
1546: }

1548: /* Zeros all stored values of one CSR block, keeping its sparsity pattern.
      Safe on a NULL block or a block with no data array. */
1548: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1549: {
1550:   PetscErrorCode      ierr;

1553:   if (ha) {
1554:     HYPRE_Int     *ii, size;
1555:     HYPRE_Complex *a;

1557:     size = hypre_CSRMatrixNumRows(ha);
1558:     a    = hypre_CSRMatrixData(ha);
1559:     ii   = hypre_CSRMatrixI(ha);

1561:     if (a) { PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex)); }
1562:   }
1563:   return(0);
1564: }

1566: /* Zeros all values of the matrix (both CSR blocks); pattern is preserved. */
1566: PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1567: {
1568:   hypre_ParCSRMatrix  *parcsr;
1569:   PetscErrorCode      ierr;

1572:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1573:   /* diagonal part */
1574:   MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));
1575:   /* off-diagonal part */
1576:   MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));
1577:   return(0);
1578: }

1580: /* For each listed (block-local) row, sets off-diagonal values to 0 and the
      diagonal value (column index == row index within this block) to `diag`.
      rows[] must already be local to this CSR block; NULL block is a no-op.
      Callers pass diag only for the diag block and 0.0 for the offd block,
      where j[jj] == irow never identifies a true matrix diagonal. */
1580: static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
1581: {
1582:   PetscInt        ii, jj, ibeg, iend, irow;
1583:   PetscInt        *i, *j;
1584:   PetscScalar     *a;

1587:   if (!hA) return(0);

1589:   i = (PetscInt*) hypre_CSRMatrixI(hA);
1590:   j = (PetscInt*) hypre_CSRMatrixJ(hA);
1591:   a = hypre_CSRMatrixData(hA);

1593:   for (ii = 0; ii < N; ii++) {
1594:     irow = rows[ii];
1595:     ibeg = i[irow];
1596:     iend = i[irow+1];
1597:     for (jj = ibeg; jj < iend; jj++)
1598:       if (j[jj] == irow) a[jj] = diag;
1599:       else a[jj] = 0.0;
1600:    }
1601:    return(0);
1602: }

1604: /* Zeros the listed rows, placing `diag` on the diagonal. Off-process row
      indices are mapped/filtered to local ones by MatZeroRowsMapLocal_Private.
      Adjusting x/b as the generic MatZeroRows contract allows is unsupported.
      Fix: corrected the user-facing error message typo "right hand size" ->
      "right hand side". */
1604: static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1605: {
1606:   hypre_ParCSRMatrix  *parcsr;
1607:   PetscInt            *lrows,len;
1608:   PetscErrorCode      ierr;

1611:   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand side \n");
1612:   /* retrieve the internal matrix */
1613:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1614:   /* get locally owned rows */
1615:   MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);
1616:   /* zero diagonal part */
1617:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);
1618:   /* zero off-diagonal part (no diagonal entries live there, hence 0.0) */
1619:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);

1621:   PetscFree(lrows);
1622:   return(0);
1623: }

1625: /* Starts communication of stashed off-process entries; a no-op when the
      user guaranteed there are none (mat->nooffprocentries). */
1625: static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1626: {

1630:   if (mat->nooffprocentries) return(0);

1632:   MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);
1633:   return(0);
1634: }

1636: /* Returns one (global) row via hypre; must be paired with MatRestoreRow_HYPRE.
      NOTE(review): the (HYPRE_Int*) casts assume HYPRE_Int and PetscInt have
      the same size in this build — verify configure-time guarantees. */
1636: static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1637: {
1638:   hypre_ParCSRMatrix  *parcsr;
1639:   PetscErrorCode      ierr;

1642:   /* retrieve the internal matrix */
1643:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1644:   /* call HYPRE API */
1645:   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
1646:   return(0);
1647: }

1649: /* Releases a row obtained with MatGetRow_HYPRE (hypre requires the pairing). */
1649: static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1650: {
1651:   hypre_ParCSRMatrix  *parcsr;
1652:   PetscErrorCode      ierr;

1655:   /* retrieve the internal matrix */
1656:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1657:   /* call HYPRE API */
1658:   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
1659:   return(0);
1660: }

1662: /* Fetches an m x n block of values, one row per hypre call (v is stored
      row-major, so row i lands at v[i*n]). Negative row indices are skipped
      here; negative column indices are left for hypre to ignore. */
1662: static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1663: {
1664:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1665:   PetscInt  i;

1668:   if (!m || !n) return(0);
1669:   /* Ignore negative row indices
1670:    * And negative column indices should be automatically ignored in hypre
1671:    * */
1672:   for (i=0; i<m; i++)
1673:     if (idxm[i] >= 0) PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,(HYPRE_Int*)&n,(HYPRE_Int*)&idxm[i],(HYPRE_Int*)idxn,&v[i*n]));
1674:   return(0);
1675: }

1677: /* Handles the options meaningful for MATHYPRE. Only MAT_NO_OFF_PROC_ENTRIES
      is acted on (it tells hypre not to reserve off-process buffer space);
      everything else is silently accepted, per MatSetOption convention. */
1677: static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1678: {
1679:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

1682:   switch (op)
1683:   {
1684:   case MAT_NO_OFF_PROC_ENTRIES:
1685:     if (flg) {
1686:       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1687:     }
1688:     break;
1689:   default:
1690:     break;
1691:   }
1692:   return(0);
1693: }

/* MatView_HYPRE - views the matrix in one of two ways:
   - non-NATIVE formats: convert (by pointer, no copy) to a PETSc AIJ matrix
     and delegate to that type's view routine;
   - PETSC_VIEWER_NATIVE: write hypre's own text format via HYPRE_IJMatrixPrint,
     which produces one file per MPI rank named <filename>.NNNNN. */
1695: static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1696: {
1697:   hypre_ParCSRMatrix *parcsr;
1698:   PetscErrorCode     ierr;
1699:   Mat                B;
1700:   PetscViewerFormat  format;
1701:   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;

1704:   PetscViewerGetFormat(view,&format);
1705:   if (format != PETSC_VIEWER_NATIVE) {
1706:     MatHYPREGetParCSR_HYPRE(A,&parcsr);
1707:     MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B); /* wrap the hypre data as AIJ without copying */
1708:     MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);
1709:     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");
1710:     (*mview)(B,view);
1711:     MatDestroy(&B);
1712:   } else {
1713:     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1714:     PetscMPIInt size;
1715:     PetscBool   isascii;
1716:     const char *filename;

1718:     /* HYPRE uses only text files */
1719:     PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);
1720:     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1721:     PetscViewerFileGetName(view,&filename);
1722:     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename)); /* each rank writes filename.<rank> */
1723:     MPI_Comm_size(hA->comm,&size);
1724:     if (size > 1) {
1725:       PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);
1726:     } else {
1727:       PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);
1728:     }
1729:   }
1730:   return(0);
1731: }

/* MatDuplicate_HYPRE - duplicates the matrix. When values are not needed
   (MAT_DO_NOT_COPY_VALUES / MAT_SHARE_NONZERO_PATTERN) the ParCSR structure is
   cloned and ownership of the clone is given to the new Mat; otherwise the
   ParCSR data is deep-copied.
   NOTE(review): MAT_SHARE_NONZERO_PATTERN is handled identically to
   MAT_DO_NOT_COPY_VALUES -- the nonzero pattern is cloned, not shared. */
1733: static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1734: {
1735:   hypre_ParCSRMatrix *parcsr;
1736:   PetscErrorCode     ierr;
1737:   PetscCopyMode      cpmode;

1740:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1741:   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1742:     parcsr = hypre_ParCSRMatrixCompleteClone(parcsr); /* structural clone, values not copied */
1743:     cpmode = PETSC_OWN_POINTER; /* B takes ownership of the clone */
1744:   } else {
1745:     cpmode = PETSC_COPY_VALUES;
1746:   }
1747:   MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);
1748:   return(0);
1749: }

/* MatCopy_HYPRE - copies A into B. When both matrices share the same nonzero
   pattern and B is also of this type (detected by comparing the copy function
   pointers), the values are copied directly between the hypre ParCSR matrices;
   otherwise the generic row-by-row fallback is used. */
1751: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
1752: {
1753:   hypre_ParCSRMatrix *acsr,*bcsr;
1754:   PetscErrorCode     ierr;

1757:   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
1758:     MatHYPREGetParCSR_HYPRE(A,&acsr);
1759:     MatHYPREGetParCSR_HYPRE(B,&bcsr);
1760:     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1)); /* last arg 1: copy values too */
1761:     MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
1762:     MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
1763:   } else {
1764:     MatCopy_Basic(A,B,str); /* generic fallback for different patterns/types */
1765:   }
1766:   return(0);
1767: }

/* MatGetDiagonal_HYPRE - extracts the diagonal of the matrix into vector d.
   Requires congruent row/column layouts and (in debug builds) that no diagonal
   entry is structurally missing. For each local row i it reads the entry at the
   row start of the local diagonal CSR block, i.e. a[i] = data[rowptr[i]].
   NOTE(review): this assumes hypre stores the diagonal entry first in each row
   of the diag block -- hypre ParCSR convention; verify for this hypre version. */
1769: static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
1770: {
1771:   hypre_ParCSRMatrix *parcsr;
1772:   hypre_CSRMatrix    *dmat;
1773:   PetscScalar        *a,*data = NULL;
1774:   PetscInt           i,*diag = NULL;
1775:   PetscBool          cong;
1776:   PetscErrorCode     ierr;

1779:   MatHasCongruentLayouts(A,&cong);
1780:   if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
1781: #if defined(PETSC_USE_DEBUG)
1782:   {
1783:     PetscBool miss;
1784:     MatMissingDiagonal(A,&miss,NULL);
1785:     if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
1786:   }
1787: #endif
1788:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1789:   dmat = hypre_ParCSRMatrixDiag(parcsr); /* local (on-process) diagonal block */
1790:   if (dmat) {
1791:     VecGetArray(d,&a);
1792:     diag = (PetscInt*)(hypre_CSRMatrixI(dmat));    /* CSR row pointers */
1793:     data = (PetscScalar*)(hypre_CSRMatrixData(dmat));
1794:     for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]]; /* first stored entry of row i */
1795:     VecRestoreArray(d,&a);
1796:   }
1797:   return(0);
1798: }

1800:  #include <petscblaslapack.h>

/* MatAXPY_HYPRE - computes Y = Y + a*X.
   - SAME_NONZERO_PATTERN: the diag and offd CSR blocks of X and Y are assumed
     to have identical sparsity, so the update is a single BLAS axpy on each
     raw value array (nonzero counts are cross-checked first).
   - SUBSET_NONZERO_PATTERN: generic entry-wise fallback.
   - otherwise: a new matrix with the union pattern is preallocated, the sum is
     formed there, and it replaces Y in place (header replace). */
1802: static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
1803: {

1807:   if (str == SAME_NONZERO_PATTERN) {
1808:     hypre_ParCSRMatrix *x,*y;
1809:     hypre_CSRMatrix    *xloc,*yloc;
1810:     PetscInt           xnnz,ynnz;
1811:     PetscScalar        *xarr,*yarr;
1812:     PetscBLASInt       one=1,bnz;

1814:     MatHYPREGetParCSR(Y,&y);
1815:     MatHYPREGetParCSR(X,&x);

1817:     /* diagonal block */
1818:     xloc = hypre_ParCSRMatrixDiag(x);
1819:     yloc = hypre_ParCSRMatrixDiag(y);
1820:     xnnz = 0;
1821:     ynnz = 0;
1822:     xarr = NULL;
1823:     yarr = NULL;
1824:     if (xloc) {
1825:       xarr = (PetscScalar*)(hypre_CSRMatrixData(xloc));
1826:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1827:     }
1828:     if (yloc) {
1829:       yarr = (PetscScalar*)(hypre_CSRMatrixData(yloc));
1830:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1831:     }
1832:     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
1833:     PetscBLASIntCast(xnnz,&bnz);
1834:     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,xarr,&one,yarr,&one)); /* yarr += a*xarr over all diag nonzeros */

1836:     /* off-diagonal block */
1837:     xloc = hypre_ParCSRMatrixOffd(x);
1838:     yloc = hypre_ParCSRMatrixOffd(y);
1839:     xnnz = 0;
1840:     ynnz = 0;
1841:     xarr = NULL;
1842:     yarr = NULL;
1843:     if (xloc) {
1844:       xarr = (PetscScalar*)(hypre_CSRMatrixData(xloc));
1845:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1846:     }
1847:     if (yloc) {
1848:       yarr = (PetscScalar*)(hypre_CSRMatrixData(yloc));
1849:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1850:     }
1851:     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
1852:     PetscBLASIntCast(xnnz,&bnz);
1853:     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,xarr,&one,yarr,&one)); /* yarr += a*xarr over all offd nonzeros */
1854:   } else if (str == SUBSET_NONZERO_PATTERN) {
1855:     MatAXPY_Basic(Y,a,X,str);
1856:   } else {
1857:     Mat B;

1859:     MatAXPY_Basic_Preallocate(Y,X,&B); /* B gets the union sparsity of X and Y */
1860:     MatAXPY_BasicWithPreallocation(B,Y,a,X,str);
1861:     MatHeaderReplace(Y,&B); /* Y becomes B; old Y data is released */
1862:   }
1863:   return(0);
1864: }

1866: /*MC
1867:    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1868:           based on the hypre IJ interface.

1870:    Level: intermediate

1872: .seealso: MatCreate()
1873: M*/

/* MatCreate_HYPRE - constructor for the MATHYPRE matrix type. Allocates the
   type-specific data, installs the operations table, creates the stash used to
   buffer off-process entries set before assembly, duplicates the communicator
   for hypre's use, and composes the conversion/interop functions that other
   types query at runtime. */
1875: PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
1876: {
1877:   Mat_HYPRE      *hB;

1881:   PetscNewLog(B,&hB);
1882:   hB->inner_free = PETSC_TRUE; /* by default this Mat owns (and frees) the hypre IJ matrix */
1883:   hB->available  = PETSC_TRUE;
1884:   hB->size       = 0;
1885:   hB->array      = NULL;

1887:   B->data       = (void*)hB;
1888:   B->rmap->bs   = 1;
1889:   B->assembled  = PETSC_FALSE;

1891:   PetscMemzero(B->ops,sizeof(struct _MatOps)); /* clear inherited ops before installing our own */
1892:   B->ops->mult             = MatMult_HYPRE;
1893:   B->ops->multtranspose    = MatMultTranspose_HYPRE;
1894:   B->ops->multadd          = MatMultAdd_HYPRE;
1895:   B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
1896:   B->ops->setup            = MatSetUp_HYPRE;
1897:   B->ops->destroy          = MatDestroy_HYPRE;
1898:   B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
1899:   B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
1900:   B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
1901:   B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
1902:   B->ops->setvalues        = MatSetValues_HYPRE;
1903:   B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
1904:   B->ops->scale            = MatScale_HYPRE;
1905:   B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
1906:   B->ops->zeroentries      = MatZeroEntries_HYPRE;
1907:   B->ops->zerorows         = MatZeroRows_HYPRE;
1908:   B->ops->getrow           = MatGetRow_HYPRE;
1909:   B->ops->restorerow       = MatRestoreRow_HYPRE;
1910:   B->ops->getvalues        = MatGetValues_HYPRE;
1911:   B->ops->setoption        = MatSetOption_HYPRE;
1912:   B->ops->duplicate        = MatDuplicate_HYPRE;
1913:   B->ops->copy             = MatCopy_HYPRE;
1914:   B->ops->view             = MatView_HYPRE;
1915:   B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
1916:   B->ops->axpy             = MatAXPY_HYPRE;

1918:   /* build cache for off array entries formed */
1919:   MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);

1921:   MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm); /* hypre gets its own communicator copy */
1922:   PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);
1923:   PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);
1924:   PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);
1925:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);
1926:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);
1927:   PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);
1928:   PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);
1929:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);
1930:   return(0);
1931: }

/* hypre_array_destroy - frees an array that was allocated by hypre in host
   memory; used as a destruction callback (forward declared at the top of this
   file) where a PetscErrorCode-returning free function is required. */
1933: static PetscErrorCode hypre_array_destroy(void *ptr)
1934: {
1936:    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
1937:    return(0);
1938: }