Actual source code: mhypre.c

petsc-3.12.5 2020-03-29

  2: /*
  3:     Creates a hypre IJMatrix from a PETSc matrix
  4: */
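
/* This file implements the MATHYPRE matrix type (a thin wrapper around a HYPRE_IJMatrix
   holding a ParCSR object) together with conversions between PETSc AIJ/BAIJ/IS matrices
   and hypre's IJ/ParCSR formats, and PtAP/MatMatMult implementations that call hypre kernels. */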

  6: #include <petscpkg_version.h>
  7: #include <petsc/private/petschypre.h>
  8: #include <petscmathypre.h>
  9: #include <petsc/private/matimpl.h>
 10: #include <../src/mat/impls/hypre/mhypre.h>
 11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
 12: #include <../src/vec/vec/impls/hypre/vhyp.h>
 13: #include <HYPRE.h>
 14: #include <HYPRE_utilities.h>
 15: #include <_hypre_parcsr_ls.h>
 16: #include <_hypre_sstruct_ls.h>

 18: #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
 19: #define  hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
 20: #endif

 22: PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);

 24: static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
 25: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
 26: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
 27: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
 28: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
 29: static PetscErrorCode hypre_array_destroy(void*);
 30: PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
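
/* MatHYPRE_IJMatrixPreallocate: computes the per-row nonzero counts of the local diagonal
   and off-diagonal blocks with MatGetRowIJ and passes them to HYPRE_IJMatrixSetDiagOffdSizes,
   so that the HYPRE IJ matrix is preallocated to match the PETSc matrix exactly. */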

 32: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 33: {
 35:   PetscInt       i,n_d,n_o;
 36:   const PetscInt *ia_d,*ia_o;
 37:   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
 38:   HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;

 41:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 42:     MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
 43:     if (done_d) {
 44:       PetscMalloc1(n_d,&nnz_d);
 45:       for (i=0; i<n_d; i++) {
 46:         nnz_d[i] = ia_d[i+1] - ia_d[i];
 47:       }
 48:     }
 49:     MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
 50:   }
 51:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 52:     MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 53:     if (done_o) {
 54:       PetscMalloc1(n_o,&nnz_o);
 55:       for (i=0; i<n_o; i++) {
 56:         nnz_o[i] = ia_o[i+1] - ia_o[i];
 57:       }
 58:     }
 59:     MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 60:   }
 61:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 62:     if (!done_o) { /* only diagonal part */
 63:       PetscMalloc1(n_d,&nnz_o);
 64:       for (i=0; i<n_d; i++) {
 65:         nnz_o[i] = 0;
 66:       }
 67:     }
 68: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
 69:     { /* If we don't do this, the columns of the matrix will be all zeros! */
 70:       hypre_AuxParCSRMatrix *aux_matrix;
 71:       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
 72:       hypre_AuxParCSRMatrixDestroy(aux_matrix);
 73:       hypre_IJMatrixTranslator(ij) = NULL;
 74:       PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
 75:       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
 76:       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
 77:     }
 78: #else
 79:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
 80: #endif
 81:     PetscFree(nnz_d);
 82:     PetscFree(nnz_o);
 83:   }
 84:   return(0);
 85: }
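
/* MatHYPRE_CreateFromMat: creates the HYPRE_IJMatrix with the same parallel layout as the
   PETSc matrix; for (MPI/Seq)AIJ and (MPI/Seq)BAIJ input it also preallocates the IJ matrix
   from the local diagonal/off-diagonal blocks via MatHYPRE_IJMatrixPreallocate. */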

 87: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
 88: {
 90:   PetscInt       rstart,rend,cstart,cend;

 93:   PetscLayoutSetUp(A->rmap);
 94:   PetscLayoutSetUp(A->cmap);
 95:   rstart = A->rmap->rstart;
 96:   rend   = A->rmap->rend;
 97:   cstart = A->cmap->rstart;
 98:   cend   = A->cmap->rend;
 99:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
100:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
101:   {
102:     PetscBool      same;
103:     Mat            A_d,A_o;
104:     const PetscInt *colmap;
105:     PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);
106:     if (same) {
107:       MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
108:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
109:       return(0);
110:     }
111:     PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
112:     if (same) {
113:       MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
114:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
115:       return(0);
116:     }
117:     PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);
118:     if (same) {
119:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
120:       return(0);
121:     }
122:     PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
123:     if (same) {
124:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
125:       return(0);
126:     }
127:   }
128:   return(0);
129: }
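
/* MatHYPRE_IJMatrixCopy: copies the values of a PETSc matrix into an existing HYPRE_IJMatrix.
   Square MPIAIJ matrices and SeqAIJ matrices take the fast paths below, which write directly
   into the hypre CSR arrays; all other types fall back to a row-by-row copy with MatGetRow
   and HYPRE_IJMatrixSetValues. */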

131: static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
132: {
133:   PetscErrorCode    ierr;
134:   PetscInt          i,rstart,rend,ncols,nr,nc;
135:   const PetscScalar *values;
136:   const PetscInt    *cols;
137:   PetscBool         flg;

140:   PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
141:   MatGetSize(A,&nr,&nc);
142:   if (flg && nr == nc) {
143:     MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
144:     return(0);
145:   }
146:   PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
147:   if (flg) {
148:     MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
149:     return(0);
150:   }

152:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
153:   MatGetOwnershipRange(A,&rstart,&rend);
154:   for (i=rstart; i<rend; i++) {
155:     MatGetRow(A,i,&ncols,&cols,&values);
156:     if (ncols) {
157:       HYPRE_Int nc = (HYPRE_Int)ncols;

159:       if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
160:       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
161:     }
162:     MatRestoreRow(A,i,&ncols,&cols,&values);
163:   }
164:   return(0);
165: }
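
/* The two FastCopy routines below bypass HYPRE_IJMatrixSetValues and copy the CSR arrays of
   the SeqAIJ blocks straight into the hypre_CSRMatrix structures, converting the index arrays
   element by element only when PetscInt and HYPRE_Int have different sizes. */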

167: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
168: {
169:   PetscErrorCode        ierr;
170:   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
171:   HYPRE_Int             type;
172:   hypre_ParCSRMatrix    *par_matrix;
173:   hypre_AuxParCSRMatrix *aux_matrix;
174:   hypre_CSRMatrix       *hdiag;
175:   PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

178:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
179:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
180:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
181:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
182:   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
183:   /*
184:        this is the hack part where we monkey directly with the hypre data structures
185:   */
186:   if (sameint) {
187:     PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);
188:     PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);
189:   } else {
190:     PetscInt i;

192:     for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
193:     for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
194:   }
195:   PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);

197:   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
198:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
199:   return(0);
200: }

202: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
203: {
204:   PetscErrorCode        ierr;
205:   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
206:   Mat_SeqAIJ            *pdiag,*poffd;
207:   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
208:   HYPRE_Int             *hjj,type;
209:   hypre_ParCSRMatrix    *par_matrix;
210:   hypre_AuxParCSRMatrix *aux_matrix;
211:   hypre_CSRMatrix       *hdiag,*hoffd;
212:   PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

215:   pdiag = (Mat_SeqAIJ*) pA->A->data;
216:   poffd = (Mat_SeqAIJ*) pA->B->data;
217:   /* cstart is only valid for square MPIAIJ laid out in the usual way */
218:   MatGetOwnershipRange(A,&cstart,NULL);

220:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
221:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
222:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
223:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
224:   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
225:   hoffd = hypre_ParCSRMatrixOffd(par_matrix);

227:   /*
228:        this is the hack part where we monkey directly with the hypre data structures
229:   */
230:   if (sameint) {
231:     PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);
232:   } else {
233:     for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
234:   }
235:   /* for hypre versions older than 2.16, shift the diag column indices (hdiag->j) back to global numbering, since those versions expect global indices here */
236:   hjj = hdiag->j;
237:   pjj = pdiag->j;
238: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
239:   for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
240: #else
241:   for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
242: #endif
243:   PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);
244:   if (sameint) {
245:     PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);
246:   } else {
247:     for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
248:   }

250:   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
251:      If we hacked hypre a bit more we might be able to avoid this step */
252: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
253:   PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
254:   jj  = (PetscInt*) hoffd->big_j;
255: #else
256:   jj  = (PetscInt*) hoffd->j;
257: #endif
258:   pjj = poffd->j;
259:   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

261:   PetscArraycpy(hoffd->data,poffd->a,poffd->nz);

263:   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
264:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
265:   return(0);
266: }
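
/* MatConvert_HYPRE_IS: builds a MATIS matrix from the ParCSR object. The row map covers the
   locally owned rows; the column map lists the owned diagonal columns followed by col_map_offd.
   The diagonal and off-diagonal CSR parts are then merged, with off-diagonal column indices
   shifted by the number of diagonal columns, into a single sequential local matrix. */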

268: static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
269: {
270:   Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
271:   Mat                    lA;
272:   ISLocalToGlobalMapping rl2g,cl2g;
273:   IS                     is;
274:   hypre_ParCSRMatrix     *hA;
275:   hypre_CSRMatrix        *hdiag,*hoffd;
276:   MPI_Comm               comm;
277:   HYPRE_Complex          *hdd,*hod,*aa;
278:   PetscScalar            *data;
279:   HYPRE_BigInt           *col_map_offd;
280:   HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
281:   PetscInt               *ii,*jj,*iptr,*jptr;
282:   PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
283:   HYPRE_Int              type;
284:   PetscErrorCode         ierr;

287:   comm = PetscObjectComm((PetscObject)A);
288:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
289:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
290:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
291:   M     = hypre_ParCSRMatrixGlobalNumRows(hA);
292:   N     = hypre_ParCSRMatrixGlobalNumCols(hA);
293:   str   = hypre_ParCSRMatrixFirstRowIndex(hA);
294:   stc   = hypre_ParCSRMatrixFirstColDiag(hA);
295:   hdiag = hypre_ParCSRMatrixDiag(hA);
296:   hoffd = hypre_ParCSRMatrixOffd(hA);
297:   dr    = hypre_CSRMatrixNumRows(hdiag);
298:   dc    = hypre_CSRMatrixNumCols(hdiag);
299:   nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
300:   hdi   = hypre_CSRMatrixI(hdiag);
301:   hdj   = hypre_CSRMatrixJ(hdiag);
302:   hdd   = hypre_CSRMatrixData(hdiag);
303:   oc    = hypre_CSRMatrixNumCols(hoffd);
304:   nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
305:   hoi   = hypre_CSRMatrixI(hoffd);
306:   hoj   = hypre_CSRMatrixJ(hoffd);
307:   hod   = hypre_CSRMatrixData(hoffd);
308:   if (reuse != MAT_REUSE_MATRIX) {
309:     PetscInt *aux;

311:     /* generate l2g maps for rows and cols */
312:     ISCreateStride(comm,dr,str,1,&is);
313:     ISLocalToGlobalMappingCreateIS(is,&rl2g);
314:     ISDestroy(&is);
315:     col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
316:     PetscMalloc1(dc+oc,&aux);
317:     for (i=0; i<dc; i++) aux[i] = i+stc;
318:     for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
319:     ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
320:     ISLocalToGlobalMappingCreateIS(is,&cl2g);
321:     ISDestroy(&is);
322:     /* create MATIS object */
323:     MatCreate(comm,B);
324:     MatSetSizes(*B,dr,dc,M,N);
325:     MatSetType(*B,MATIS);
326:     MatSetLocalToGlobalMapping(*B,rl2g,cl2g);
327:     ISLocalToGlobalMappingDestroy(&rl2g);
328:     ISLocalToGlobalMappingDestroy(&cl2g);

330:     /* allocate CSR for local matrix */
331:     PetscMalloc1(dr+1,&iptr);
332:     PetscMalloc1(nnz,&jptr);
333:     PetscMalloc1(nnz,&data);
334:   } else {
335:     PetscInt  nr;
336:     PetscBool done;
337:     MatISGetLocalMat(*B,&lA);
338:     MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);
339:     if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
340:     if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
341:     MatSeqAIJGetArray(lA,&data);
342:   }
343:   /* merge local matrices */
344:   ii  = iptr;
345:   jj  = jptr;
346:   aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
347:   *ii = *(hdi++) + *(hoi++);
348:   for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
349:     PetscScalar *aold = (PetscScalar*)aa;
350:     PetscInt    *jold = jj,nc = jd+jo;
351:     for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
352:     for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
353:     *(++ii) = *(hdi++) + *(hoi++);
354:     PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);
355:   }
356:   for (; cum<dr; cum++) *(++ii) = nnz;
357:   if (reuse != MAT_REUSE_MATRIX) {
358:     Mat_SeqAIJ* a;

360:     MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);
361:     MatISSetLocalMat(*B,lA);
362:     /* hack SeqAIJ */
363:     a          = (Mat_SeqAIJ*)(lA->data);
364:     a->free_a  = PETSC_TRUE;
365:     a->free_ij = PETSC_TRUE;
366:     MatDestroy(&lA);
367:   }
368:   MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
369:   MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
370:   if (reuse == MAT_INPLACE_MATRIX) {
371:     MatHeaderReplace(A,B);
372:   }
373:   return(0);
374: }

376: PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
377: {
378:   Mat            M = NULL;
379:   Mat_HYPRE      *hB;
380:   MPI_Comm       comm = PetscObjectComm((PetscObject)A);

384:   if (reuse == MAT_REUSE_MATRIX) {
385:     /* always destroy the old matrix and allocate new memory;
386:        hopefully this does not churn the memory too much. The problem
387:        is that we do not know whether it is possible to put the matrix back into
388:        its initial state so that we could directly copy the values
389:        the second time through. */
390:     hB = (Mat_HYPRE*)((*B)->data);
391:     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
392:   } else {
393:     MatCreate(comm,&M);
394:     MatSetType(M,MATHYPRE);
395:     MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
396:     hB   = (Mat_HYPRE*)(M->data);
397:     if (reuse == MAT_INITIAL_MATRIX) *B = M;
398:   }
399:   MatHYPRE_CreateFromMat(A,hB);
400:   MatHYPRE_IJMatrixCopy(A,hB->ij);
401:   if (reuse == MAT_INPLACE_MATRIX) {
402:     MatHeaderReplace(A,&M);
403:   }
404:   (*B)->preallocated = PETSC_TRUE;
405:   MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
406:   MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
407:   return(0);
408: }
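
/* These converters are typically reached through MatConvert(). A minimal usage sketch
   (illustrative only; A is an assembled AIJ matrix):

     Mat B;
     ierr = MatConvert(A,MATHYPRE,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
     ...
     ierr = MatDestroy(&B);CHKERRQ(ierr);

   With MAT_INPLACE_MATRIX the header of A itself is replaced, as done above through
   MatHeaderReplace(); the reverse direction is handled by MatConvert_HYPRE_AIJ below. */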

410: static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
411: {
412:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
413:   hypre_ParCSRMatrix *parcsr;
414:   hypre_CSRMatrix    *hdiag,*hoffd;
415:   MPI_Comm           comm;
416:   PetscScalar        *da,*oa,*aptr;
417:   PetscInt           *dii,*djj,*oii,*ojj,*iptr;
418:   PetscInt           i,dnnz,onnz,m,n;
419:   HYPRE_Int          type;
420:   PetscMPIInt        size;
421:   PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
422:   PetscErrorCode     ierr;

425:   comm = PetscObjectComm((PetscObject)A);
426:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
427:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
428:   if (reuse == MAT_REUSE_MATRIX) {
429:     PetscBool ismpiaij,isseqaij;
430:     PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);
431:     PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);
432:     if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
433:   }
434:   MPI_Comm_size(comm,&size);

436:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
437:   hdiag = hypre_ParCSRMatrixDiag(parcsr);
438:   hoffd = hypre_ParCSRMatrixOffd(parcsr);
439:   m     = hypre_CSRMatrixNumRows(hdiag);
440:   n     = hypre_CSRMatrixNumCols(hdiag);
441:   dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
442:   onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
443:   if (reuse == MAT_INITIAL_MATRIX) {
444:     PetscMalloc1(m+1,&dii);
445:     PetscMalloc1(dnnz,&djj);
446:     PetscMalloc1(dnnz,&da);
447:   } else if (reuse == MAT_REUSE_MATRIX) {
448:     PetscInt  nr;
449:     PetscBool done;
450:     if (size > 1) {
451:       Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

453:       MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
454:       if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
455:       if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
456:       MatSeqAIJGetArray(b->A,&da);
457:     } else {
458:       MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
459:       if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
460:       if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
461:       MatSeqAIJGetArray(*B,&da);
462:     }
463:   } else { /* MAT_INPLACE_MATRIX */
464:     if (!sameint) {
465:       PetscMalloc1(m+1,&dii);
466:       PetscMalloc1(dnnz,&djj);
467:     } else {
468:       dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
469:       djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
470:     }
471:     da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
472:   }

474:   if (!sameint) {
475:     for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
476:     for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
477:   } else {
478:     PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);
479:     PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);
480:   }
481:   PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);
482:   iptr = djj;
483:   aptr = da;
484:   for (i=0; i<m; i++) {
485:     PetscInt nc = dii[i+1]-dii[i];
486:     PetscSortIntWithScalarArray(nc,iptr,aptr);
487:     iptr += nc;
488:     aptr += nc;
489:   }
490:   if (size > 1) {
491:     HYPRE_BigInt *coffd;
492:     HYPRE_Int    *offdj;

494:     if (reuse == MAT_INITIAL_MATRIX) {
495:       PetscMalloc1(m+1,&oii);
496:       PetscMalloc1(onnz,&ojj);
497:       PetscMalloc1(onnz,&oa);
498:     } else if (reuse == MAT_REUSE_MATRIX) {
499:       Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
500:       PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
501:       PetscBool  done;

503:       MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);
504:       if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
505:       if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
506:       MatSeqAIJGetArray(b->B,&oa);
507:     } else { /* MAT_INPLACE_MATRIX */
508:       if (!sameint) {
509:         PetscMalloc1(m+1,&oii);
510:         PetscMalloc1(onnz,&ojj);
511:       } else {
512:         oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
513:         ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
514:       }
515:       oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
516:     }
517:     if (!sameint) {
518:       for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
519:     } else {
520:       PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);
521:     }
522:     offdj = hypre_CSRMatrixJ(hoffd);
523:     coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
524:     for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
525:     PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);
526:     iptr = ojj;
527:     aptr = oa;
528:     for (i=0; i<m; i++) {
529:        PetscInt nc = oii[i+1]-oii[i];
530:        PetscSortIntWithScalarArray(nc,iptr,aptr);
531:        iptr += nc;
532:        aptr += nc;
533:     }
534:     if (reuse == MAT_INITIAL_MATRIX) {
535:       Mat_MPIAIJ *b;
536:       Mat_SeqAIJ *d,*o;

538:       MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);
539:       /* hack MPIAIJ */
540:       b          = (Mat_MPIAIJ*)((*B)->data);
541:       d          = (Mat_SeqAIJ*)b->A->data;
542:       o          = (Mat_SeqAIJ*)b->B->data;
543:       d->free_a  = PETSC_TRUE;
544:       d->free_ij = PETSC_TRUE;
545:       o->free_a  = PETSC_TRUE;
546:       o->free_ij = PETSC_TRUE;
547:     } else if (reuse == MAT_INPLACE_MATRIX) {
548:       Mat T;

550:       MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);
551:       if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
552:         hypre_CSRMatrixI(hdiag) = NULL;
553:         hypre_CSRMatrixJ(hdiag) = NULL;
554:         hypre_CSRMatrixI(hoffd) = NULL;
555:         hypre_CSRMatrixJ(hoffd) = NULL;
556:       } else { /* Hack MPIAIJ -> free ij but not a */
557:         Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
558:         Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
559:         Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

561:         d->free_ij = PETSC_TRUE;
562:         o->free_ij = PETSC_TRUE;
563:       }
564:       hypre_CSRMatrixData(hdiag) = NULL;
565:       hypre_CSRMatrixData(hoffd) = NULL;
566:       MatHeaderReplace(A,&T);
567:     }
568:   } else {
569:     oii  = NULL;
570:     ojj  = NULL;
571:     oa   = NULL;
572:     if (reuse == MAT_INITIAL_MATRIX) {
573:       Mat_SeqAIJ* b;

575:       MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);
576:       /* hack SeqAIJ */
577:       b          = (Mat_SeqAIJ*)((*B)->data);
578:       b->free_a  = PETSC_TRUE;
579:       b->free_ij = PETSC_TRUE;
580:     } else if (reuse == MAT_INPLACE_MATRIX) {
581:       Mat T;

583:       MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);
584:       if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
585:         hypre_CSRMatrixI(hdiag) = NULL;
586:         hypre_CSRMatrixJ(hdiag) = NULL;
587:       } else { /* free ij but not a */
588:         Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

590:         b->free_ij = PETSC_TRUE;
591:       }
592:       hypre_CSRMatrixData(hdiag) = NULL;
593:       MatHeaderReplace(A,&T);
594:     }
595:   }

597:   /* we have to use hypre_Tfree to free the HYPRE arrays
598:      that PETSc now owns */
599:   if (reuse == MAT_INPLACE_MATRIX) {
600:     PetscInt nh;
601:     void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
602:     const char *names[6] = {"_hypre_csr_da",
603:                             "_hypre_csr_oa",
604:                             "_hypre_csr_dii",
605:                             "_hypre_csr_djj",
606:                             "_hypre_csr_oii",
607:                             "_hypre_csr_ojj"};
608:     nh = sameint ? 6 : 2;
609:     for (i=0; i<nh; i++) {
610:       PetscContainer c;

612:       PetscContainerCreate(comm,&c);
613:       PetscContainerSetPointer(c,ptrs[i]);
614:       PetscContainerSetUserDestroy(c,hypre_array_destroy);
615:       PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);
616:       PetscContainerDestroy(&c);
617:     }
618:   }
619:   return(0);
620: }
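
/* MatAIJGetParCSR_Private/MatAIJRestoreParCSR_Private: wrap an AIJ matrix in a temporary
   hypre_ParCSRMatrix without copying the values. When PetscInt and HYPRE_Int have the same
   size the CSR index arrays are shared as well; otherwise temporary HYPRE_Int copies are
   allocated here and freed in the Restore routine, which also resets all borrowed pointers
   to NULL before destroying the temporary ParCSR. */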

622: static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
623: {
624:   hypre_ParCSRMatrix *tA;
625:   hypre_CSRMatrix    *hdiag,*hoffd;
626:   Mat_SeqAIJ         *diag,*offd;
627:   PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
628:   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
629:   PetscBool          ismpiaij,isseqaij;
630:   PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
631:   PetscErrorCode     ierr;

634:   PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
635:   PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
636:   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
637:   if (ismpiaij) {
638:     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

640:     diag   = (Mat_SeqAIJ*)a->A->data;
641:     offd   = (Mat_SeqAIJ*)a->B->data;
642:     garray = a->garray;
643:     noffd  = a->B->cmap->N;
644:     dnnz   = diag->nz;
645:     onnz   = offd->nz;
646:   } else {
647:     diag   = (Mat_SeqAIJ*)A->data;
648:     offd   = NULL;
649:     garray = NULL;
650:     noffd  = 0;
651:     dnnz   = diag->nz;
652:     onnz   = 0;
653:   }

655:   /* create a temporary ParCSR */
656:   if (HYPRE_AssumedPartitionCheck()) {
657:     PetscMPIInt myid;

659:     MPI_Comm_rank(comm,&myid);
660:     row_starts = A->rmap->range + myid;
661:     col_starts = A->cmap->range + myid;
662:   } else {
663:     row_starts = A->rmap->range;
664:     col_starts = A->cmap->range;
665:   }
666:   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
667:   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
668:   hypre_ParCSRMatrixSetColStartsOwner(tA,0);

670:   /* set diagonal part */
671:   hdiag = hypre_ParCSRMatrixDiag(tA);
672:   if (sameint) { /* reuse CSR pointers */
673:     hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
674:     hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
675:   } else { /* malloc CSR pointers */
676:     HYPRE_Int *hi,*hj;

678:     PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);
679:     for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
680:     for (i = 0; i < dnnz; i++)         hj[i] = (HYPRE_Int)(diag->j[i]);
681:     hypre_CSRMatrixI(hdiag) = hi;
682:     hypre_CSRMatrixJ(hdiag) = hj;
683:   }
684:   hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a;
685:   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
686:   hypre_CSRMatrixSetRownnz(hdiag);
687:   hypre_CSRMatrixSetDataOwner(hdiag,0);

689:   /* set offdiagonal part */
690:   hoffd = hypre_ParCSRMatrixOffd(tA);
691:   if (offd) {
692:     if (sameint) { /* reuse CSR pointers */
693:       hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
694:       hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
695:     } else { /* malloc CSR pointers */
696:       HYPRE_Int *hi,*hj;

698:       PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);
699:       for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
700:       for (i = 0; i < onnz; i++)         hj[i] = (HYPRE_Int)(offd->j[i]);
701:       hypre_CSRMatrixI(hoffd) = hi;
702:       hypre_CSRMatrixJ(hoffd) = hj;
703:     }
704:     hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
705:     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
706:     hypre_CSRMatrixSetRownnz(hoffd);
707:     hypre_CSRMatrixSetDataOwner(hoffd,0);
708:     hypre_ParCSRMatrixSetNumNonzeros(tA);
709:     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
710:   }
711:   *hA = tA;
712:   return(0);
713: }

715: static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
716: {
717:   hypre_CSRMatrix *hdiag,*hoffd;
718:   PetscBool       sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
719:   PetscErrorCode  ierr;

722:   hdiag = hypre_ParCSRMatrixDiag(*hA);
723:   hoffd = hypre_ParCSRMatrixOffd(*hA);
724:   /* free temporary memory allocated by PETSc */
725:   if (!sameint) {
726:     HYPRE_Int *hi,*hj;

728:     hi = hypre_CSRMatrixI(hdiag);
729:     hj = hypre_CSRMatrixJ(hdiag);
730:     PetscFree2(hi,hj);
731:     if (hoffd) {
732:       hi = hypre_CSRMatrixI(hoffd);
733:       hj = hypre_CSRMatrixJ(hoffd);
734:       PetscFree2(hi,hj);
735:     }
736:   }
737:   /* set pointers to NULL before destroying tA */
738:   hypre_CSRMatrixI(hdiag)           = NULL;
739:   hypre_CSRMatrixJ(hdiag)           = NULL;
740:   hypre_CSRMatrixData(hdiag)        = NULL;
741:   hypre_CSRMatrixI(hoffd)           = NULL;
742:   hypre_CSRMatrixJ(hoffd)           = NULL;
743:   hypre_CSRMatrixData(hoffd)        = NULL;
744:   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
745:   hypre_ParCSRMatrixDestroy(*hA);
746:   *hA = NULL;
747:   return(0);
748: }

750: /* calls RAP from BoomerAMG:
751:    the resulting ParCSR will not own the column and row starts.
752:    It looks like we don't need to have the diagonal entries
753:    ordered first in the rows of the diagonal part
754:    for hypre_BoomerAMGBuildCoarseOperator to work */
755: static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
756: {
757:   HYPRE_Int P_owns_col_starts,R_owns_row_starts;

760:   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
761:   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
762:   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
763:   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
764:   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
765:   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
766:   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
767:   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
768:   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
769:   return(0);
770: }

772: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
773: {
774:   Mat                B;
775:   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
776:   PetscErrorCode     ierr;

779:   MatAIJGetParCSR_Private(A,&hA);
780:   MatAIJGetParCSR_Private(P,&hP);
781:   MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);
782:   MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);
783:   MatHeaderMerge(C,&B);
784:   MatAIJRestoreParCSR_Private(A,&hA);
785:   MatAIJRestoreParCSR_Private(P,&hP);
786:   return(0);
787: }

789: PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
790: {

794:   MatCreate(PetscObjectComm((PetscObject)A),C);
795:   MatSetType(*C,MATAIJ);
796:   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
797:   return(0);
798: }

800: static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
801: {
802:   Mat                B;
803:   Mat_HYPRE          *hP;
804:   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
805:   HYPRE_Int          type;
806:   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
807:   PetscBool          ishypre;
808:   PetscErrorCode     ierr;

811:   PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
812:   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
813:   hP = (Mat_HYPRE*)P->data;
814:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
815:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
816:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

818:   MatAIJGetParCSR_Private(A,&hA);
819:   MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);
820:   MatAIJRestoreParCSR_Private(A,&hA);

822:   /* create temporary matrix and merge to C */
823:   MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);
824:   MatHeaderMerge(C,&B);
825:   return(0);
826: }

828: static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
829: {

833:   if (scall == MAT_INITIAL_MATRIX) {
834:     const char *deft = MATAIJ;
835:     char       type[256];
836:     PetscBool  flg;

838:     PetscObjectOptionsBegin((PetscObject)A);
839:     PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);
840:     PetscOptionsEnd();
841:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
842:     MatCreate(PetscObjectComm((PetscObject)A),C);
843:     if (flg) {
844:       MatSetType(*C,type);
845:     } else {
846:       MatSetType(*C,deft);
847:     }
848:     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
849:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
850:   }
851:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
852:   (*(*C)->ops->ptapnumeric)(A,P,*C);
853:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
854:   return(0);
855: }
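
/* MatPtAPSymbolic/Numeric_AIJ_AIJ_wHYPRE compute PtAP for AIJ matrices through hypre's
   BoomerAMG RAP kernel, while MatPtAP_AIJ_HYPRE handles an AIJ A with a MATHYPRE P.
   The runtime option -matptap_hypre_outtype (read in MatPtAP_AIJ_HYPRE) selects the matrix
   type of the product C; it defaults to MATAIJ. */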

857: static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
858: {
859:   Mat                B;
860:   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
861:   Mat_HYPRE          *hA,*hP;
862:   PetscBool          ishypre;
863:   HYPRE_Int          type;
864:   PetscErrorCode     ierr;

867:   PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
868:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
869:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
870:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
871:   hA = (Mat_HYPRE*)A->data;
872:   hP = (Mat_HYPRE*)P->data;
873:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
874:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
875:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
876:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
877:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
878:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
879:   MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);
880:   MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);
881:   MatHeaderMerge(C,&B);
882:   return(0);
883: }

885: static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
886: {

890:   if (scall == MAT_INITIAL_MATRIX) {
891:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
892:     MatCreate(PetscObjectComm((PetscObject)A),C);
893:     MatSetType(*C,MATHYPRE);
894:     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
895:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
896:   }
897:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
898:   (*(*C)->ops->ptapnumeric)(A,P,*C);
899:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
900:   return(0);
901: }

903: /* calls hypre_ParMatmul
904:    hypre_ParMatmul uses hypre_ParCSRMatrixCreate with the communicator of hA;
905:    hypre_ParCSRMatrixCreate does not duplicate the communicator.
906:    It looks like we don't need to have the diagonal entries
907:    ordered first in the rows of the diagonal part
908:    for hypre_ParMatmul to work */
909: static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
910: {
912:   PetscStackPush("hypre_ParMatmul");
913:   *hAB = hypre_ParMatmul(hA,hB);
914:   PetscStackPop;
915:   return(0);
916: }

918: static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
919: {
920:   Mat                D;
921:   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
922:   PetscErrorCode     ierr;

925:   MatAIJGetParCSR_Private(A,&hA);
926:   MatAIJGetParCSR_Private(B,&hB);
927:   MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);
928:   MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);
929:   MatHeaderMerge(C,&D);
930:   MatAIJRestoreParCSR_Private(A,&hA);
931:   MatAIJRestoreParCSR_Private(B,&hB);
932:   return(0);
933: }

935: PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
936: {

940:   MatCreate(PetscObjectComm((PetscObject)A),C);
941:   MatSetType(*C,MATAIJ);
942:   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
943:   return(0);
944: }

946: static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
947: {
948:   Mat                D;
949:   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
950:   Mat_HYPRE          *hA,*hB;
951:   PetscBool          ishypre;
952:   HYPRE_Int          type;
953:   PetscErrorCode     ierr;

956:   PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);
957:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
958:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
959:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
960:   hA = (Mat_HYPRE*)A->data;
961:   hB = (Mat_HYPRE*)B->data;
962:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
963:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
964:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
965:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
966:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
967:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
968:   MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);
969:   MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);
970:   /* need to use HeaderReplace because HeaderMerge messes up the communicator */
971:   MatHeaderReplace(C,&D);
972:   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
973:   return(0);
974: }

976: static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
977: {

981:   if (scall == MAT_INITIAL_MATRIX) {
982:     PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
983:     MatCreate(PetscObjectComm((PetscObject)A),C);
984:     MatSetType(*C,MATHYPRE);
985:     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
986:     PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
987:   }
988:   PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
989:   (*(*C)->ops->matmultnumeric)(A,B,*C);
990:   PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
991:   return(0);
992: }

994: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
995: {
996:   Mat                E;
997:   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
998:   PetscErrorCode     ierr;

1001:   MatAIJGetParCSR_Private(A,&hA);
1002:   MatAIJGetParCSR_Private(B,&hB);
1003:   MatAIJGetParCSR_Private(C,&hC);
1004:   MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);
1005:   MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);
1006:   MatHeaderMerge(D,&E);
1007:   MatAIJRestoreParCSR_Private(A,&hA);
1008:   MatAIJRestoreParCSR_Private(B,&hB);
1009:   MatAIJRestoreParCSR_Private(C,&hC);
1010:   return(0);
1011: }

1013: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
1014: {

1018:   MatCreate(PetscObjectComm((PetscObject)A),D);
1019:   MatSetType(*D,MATAIJ);
1020:   return(0);
1021: }

1023: static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1024: {

1028:   MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);
1029:   return(0);
1030: }

1032: static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1033: {

1037:   MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);
1038:   return(0);
1039: }

1041: static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1042: {

1046:   if (y != z) {
1047:     VecCopy(y,z);
1048:   }
1049:   MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);
1050:   return(0);
1051: }

1053: static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1054: {

1058:   if (y != z) {
1059:     VecCopy(y,z);
1060:   }
1061:   MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);
1062:   return(0);
1063: }

1065: /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
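/* The kernel avoids copies by temporarily swapping the data pointers of the internal hypre
   vectors hA->x and hA->b with the arrays of the PETSc vectors (VecHYPRE_ParVectorReplacePointer)
   around the call to hypre_ParCSRMatrixMatvec/MatvecT, and restoring them afterwards. */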
1066: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1067: {
1068:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1069:   hypre_ParCSRMatrix *parcsr;
1070:   hypre_ParVector    *hx,*hy;
1071:   HYPRE_Complex      *ax,*ay,*sax,*say;
1072:   PetscErrorCode     ierr;

1075:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
1076:   PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
1077:   PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
1078:   VecGetArrayRead(x,(const PetscScalar**)&ax);
1079:   VecGetArray(y,(PetscScalar**)&ay);
1080:   if (trans) {
1081:     VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
1082:     VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
1083:     hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
1084:     VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
1085:     VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
1086:   } else {
1087:     VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
1088:     VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
1089:     hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
1090:     VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
1091:     VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
1092:   }
1093:   VecRestoreArrayRead(x,(const PetscScalar**)&ax);
1094:   VecRestoreArray(y,(PetscScalar**)&ay);
1095:   return(0);
1096: }

1098: static PetscErrorCode MatDestroy_HYPRE(Mat A)
1099: {
1100:   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;

1104:   if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
1105:   if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
1106:   if (hA->ij) {
1107:     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
1108:     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
1109:   }
1110:   if (hA->comm) { MPI_Comm_free(&hA->comm); }

1112:   MatStashDestroy_Private(&A->stash);

1114:   PetscFree(hA->array);

1116:   PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);
1117:   PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);
1118:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);
1119:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);
1120:   PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);
1121:   PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);
1122:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);
1123:   PetscFree(A->data);
1124:   return(0);
1125: }

1127: static PetscErrorCode MatSetUp_HYPRE(Mat A)
1128: {

1132:   MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);
1133:   return(0);
1134: }
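
/* MatAssemblyEnd_HYPRE: first communicates the off-process entries stashed by MatSetValues_HYPRE
   and applies them locally, then calls HYPRE_IJMatrixAssemble and recreates the hypre auxiliary
   matrix (which the assembly destroys) so that subsequent MatSetValues calls keep working.
   On the first assembly it also creates the two work IJ vectors used by the MatMult kernels. */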

1136: static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1137: {
1138:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1139:   Vec                x,b;
1140:   PetscMPIInt        n;
1141:   PetscInt           i,j,rstart,ncols,flg;
1142:   PetscInt           *row,*col;
1143:   PetscScalar        *val;
1144:   PetscErrorCode     ierr;

1147:   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

1149:   if (!A->nooffprocentries) {
1150:     while (1) {
1151:       MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);
1152:       if (!flg) break;

1154:       for (i=0; i<n; ) {
1155:         /* Now identify the consecutive vals belonging to the same row */
1156:         for (j=i,rstart=row[j]; j<n; j++) {
1157:           if (row[j] != rstart) break;
1158:         }
1159:         if (j < n) ncols = j-i;
1160:         else       ncols = n-i;
1161:         /* Now assemble all these values with a single function call */
1162:         MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);

1164:         i = j;
1165:       }
1166:     }
1167:     MatStashScatterEnd_Private(&A->stash);
1168:   }

1170:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
1171:   /* The assembly routine destroys the aux_matrix; we recreate it here by calling HYPRE_IJMatrixInitialize */
1172:   {
1173:     hypre_AuxParCSRMatrix *aux_matrix;

1175:     /* call destroy just to make sure we do not leak anything */
1176:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1177:     PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
1178:     hypre_IJMatrixTranslator(hA->ij) = NULL;

1180:     /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1181:     PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1182:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1183:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1184:     PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
1185:   }
1186:   if (hA->x) return(0);
1187:   PetscLayoutSetUp(A->rmap);
1188:   PetscLayoutSetUp(A->cmap);
1189:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);
1190:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);
1191:   VecHYPRE_IJVectorCreate(x,&hA->x);
1192:   VecHYPRE_IJVectorCreate(b,&hA->b);
1193:   VecDestroy(&x);
1194:   VecDestroy(&b);
1195:   return(0);
1196: }
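
/* MatGetArray_HYPRE/MatRestoreArray_HYPRE manage a single, grow-only scratch buffer (hA->array)
   that MatSetValues_HYPRE uses to hold compacted column indices and values converted to
   HYPRE_Complex; the "available" flag guards against nested use of the buffer. */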

1198: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1199: {
1200:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1201:   PetscErrorCode     ierr;

1204:   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

1206:   if (hA->size >= size) {
1207:     *array = hA->array;
1208:   } else {
1209:     PetscFree(hA->array);
1210:     hA->size = size;
1211:     PetscMalloc(hA->size,&hA->array);
1212:     *array = hA->array;
1213:   }

1215:   hA->available = PETSC_FALSE;
1216:   return(0);
1217: }

1219: static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1220: {
1221:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;

1224:   *array = NULL;
1225:   hA->available = PETSC_TRUE;
1226:   return(0);
1227: }
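
/* MatSetValues_HYPRE: drops negative (masked) row and column indices, converts the values to
   HYPRE_Complex in the scratch buffer, and then either calls HYPRE_IJMatrixAddToValues
   (ADD_VALUES) or, for INSERT_VALUES, sets local rows with HYPRE_IJMatrixSetValues and stashes
   off-process rows for communication during assembly. */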

1229: PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1230: {
1231:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1232:   PetscScalar        *vals = (PetscScalar *)v;
1233:   HYPRE_Complex      *sscr;
1234:   PetscInt           *cscr[2];
1235:   PetscInt           i,nzc;
1236:   void               *array = NULL;
1237:   PetscErrorCode     ierr;

1240:   MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);
1241:   cscr[0] = (PetscInt*)array;
1242:   cscr[1] = ((PetscInt*)array)+nc;
1243:   sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
1244:   for (i=0,nzc=0;i<nc;i++) {
1245:     if (cols[i] >= 0) {
1246:       cscr[0][nzc  ] = cols[i];
1247:       cscr[1][nzc++] = i;
1248:     }
1249:   }
1250:   if (!nzc) {
1251:     MatRestoreArray_HYPRE(A,&array);
1252:     return(0);
1253:   }

1255:   if (ins == ADD_VALUES) {
1256:     for (i=0;i<nr;i++) {
1257:       if (rows[i] >= 0 && nzc) {
1258:         PetscInt  j;
1259:         HYPRE_Int hnc = (HYPRE_Int)nzc;

1261:         if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
1262:         for (j=0;j<nzc;j++) { PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
1263:         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
1264:       }
1265:       vals += nc;
1266:     }
1267:   } else { /* INSERT_VALUES */
1268:     PetscInt rst,ren;

1270:     MatGetOwnershipRange(A,&rst,&ren);
1271:     for (i=0;i<nr;i++) {
1272:       if (rows[i] >= 0 && nzc) {
1273:         PetscInt  j;
1274:         HYPRE_Int hnc = (HYPRE_Int)nzc;

1276:         if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
1277:         for (j=0;j<nzc;j++) { PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
1278:         /* nonlocal values */
1279:         if (rows[i] < rst || rows[i] >= ren) { MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE); }
1280:         /* local values */
1281:         else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
1282:       }
1283:       vals += nc;
1284:     }
1285:   }

1287:   MatRestoreArray_HYPRE(A,&array);
1288:   return(0);
1289: }

1291: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1292: {
1293:   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
1294:   HYPRE_Int      *hdnnz,*honnz;
1295:   PetscInt       i,rs,re,cs,ce,bs;
1296:   PetscMPIInt    size;

1300:   MatGetBlockSize(A,&bs);
1301:   PetscLayoutSetUp(A->rmap);
1302:   PetscLayoutSetUp(A->cmap);
1303:   rs   = A->rmap->rstart;
1304:   re   = A->rmap->rend;
1305:   cs   = A->cmap->rstart;
1306:   ce   = A->cmap->rend;
1307:   if (!hA->ij) {
1308:     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1309:     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1310:   } else {
1311:     HYPRE_BigInt hrs,hre,hcs,hce;
1312:     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1313:     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
1314:     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
1315:   }
1316:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1317:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

1319:   if (!dnnz) {
1320:     PetscMalloc1(A->rmap->n,&hdnnz);
1321:     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1322:   } else {
1323:     hdnnz = (HYPRE_Int*)dnnz;
1324:   }
1325:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1326:   if (size > 1) {
1327:     hypre_AuxParCSRMatrix *aux_matrix;
1328:     if (!onnz) {
1329:       PetscMalloc1(A->rmap->n,&honnz);
1330:       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1331:     } else {
1332:       honnz = (HYPRE_Int*)onnz;
1333:     }
1334:     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1335:        hypre assumes the user will input the entire row values, properly sorted.
1336:        In PETSc, we don't make such an assumption, so we instead set this flag to 1.
1337:        Also, to avoid possible memory leaks, we destroy and recreate the translator.
1338:        This has to be done here, as HYPRE_IJMatrixInitialize will then properly initialize
1339:        the IJ matrix for us */
1340:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1341:     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1342:     hypre_IJMatrixTranslator(hA->ij) = NULL;
1343:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1344:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1345:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1346:   } else {
1347:     honnz = NULL;
1348:     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1349:   }

1351:   /* reset assembled flag and call the initialize method */
1352:   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1353:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1354:   if (!dnnz) {
1355:     PetscFree(hdnnz);
1356:   }
1357:   if (!onnz && honnz) {
1358:     PetscFree(honnz);
1359:   }

1361:   /* Match AIJ logic */
1362:   A->preallocated = PETSC_TRUE;
1363:   A->assembled    = PETSC_FALSE;
1364:   return(0);
1365: }

1367: /*@C
1368:    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1370:    Collective on Mat

1372:    Input Parameters:
1373: +  A - the matrix
1374: .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1375:           (same value is used for all local rows)
1376: .  dnnz - array containing the number of nonzeros in the various rows of the
1377:           DIAGONAL portion of the local submatrix (possibly different for each row)
1378:           or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1379:           The size of this array is equal to the number of local rows, i.e., 'm'.
1380:           For matrices that will be factored, you must leave room for (and set)
1381:           the diagonal entry even if it is zero.
1382: .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1383:           submatrix (same value is used for all local rows).
1384: -  onnz - array containing the number of nonzeros in the various rows of the
1385:           OFF-DIAGONAL portion of the local submatrix (possibly different for
1386:           each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1387:           structure. The size of this array is equal to the number
1388:           of local rows, i.e., 'm'.

1390:    Notes:
1391:     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

1393:    Level: intermediate

1395: .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1396: @*/
1397: PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1398: {

1404:   PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));
1405:   return(0);
1406: }

1408: /*
1409:    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

1411:    Collective

1413:    Input Parameters:
1414: +  parcsr   - the pointer to the hypre_ParCSRMatrix
1415: .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1416: -  copymode - PETSc copying options

1418:    Output Parameter:
1419: .  A  - the matrix

1421:    Level: intermediate

1423: .seealso: MATHYPRE, PetscCopyMode
1424: */
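/* Illustrative sketch: wrap an existing hypre ParCSR matrix without copying it

     Mat A;
     ierr = MatCreateFromParCSR(parcsr,MATHYPRE,PETSC_USE_POINTER,&A);CHKERRQ(ierr);

   With PETSC_USE_POINTER the ParCSR is not destroyed together with A (hA->inner_free is set
   to PETSC_FALSE below); with PETSC_OWN_POINTER PETSc takes ownership; with PETSC_COPY_VALUES
   a clone of the ParCSR is created first and then owned by A. */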
1425: PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
1426: {
1427:   Mat                   T;
1428:   Mat_HYPRE             *hA;
1429:   MPI_Comm              comm;
1430:   PetscInt              rstart,rend,cstart,cend,M,N;
1431:   PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
1432:   PetscErrorCode        ierr;

1435:   comm   = hypre_ParCSRMatrixComm(parcsr);
1436:   PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);
1437:   PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);
1438:   PetscStrcmp(mtype,MATAIJ,&isaij);
1439:   PetscStrcmp(mtype,MATHYPRE,&ishyp);
1440:   PetscStrcmp(mtype,MATIS,&isis);
1441:   isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
1442:   if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
1443:   /* access ParCSRMatrix */
1444:   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1445:   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1446:   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1447:   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1448:   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1449:   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

1451:   /* fix for empty local rows/columns */
1452:   if (rend < rstart) rend = rstart;
1453:   if (cend < cstart) cend = cstart;

1455:   /* PETSc convention */
1456:   rend++;
1457:   cend++;
1458:   rend = PetscMin(rend,M);
1459:   cend = PetscMin(cend,N);

1461:   /* create PETSc matrix with MatHYPRE */
1462:   MatCreate(comm,&T);
1463:   MatSetSizes(T,rend-rstart,cend-cstart,M,N);
1464:   MatSetType(T,MATHYPRE);
1465:   hA   = (Mat_HYPRE*)(T->data);

1467:   /* create HYPRE_IJMatrix */
1468:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
1469:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

1471:   /* create new ParCSR object if needed */
1472:   if (ishyp && copymode == PETSC_COPY_VALUES) {
1473:     hypre_ParCSRMatrix *new_parcsr;
1474:     hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

1476:     new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
1477:     hdiag      = hypre_ParCSRMatrixDiag(parcsr);
1478:     hoffd      = hypre_ParCSRMatrixOffd(parcsr);
1479:     ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
1480:     noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
1481:     PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));
1482:     PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));
1483:     parcsr     = new_parcsr;
1484:     copymode   = PETSC_OWN_POINTER;
1485:   }

1487:   /* set ParCSR object */
1488:   hypre_IJMatrixObject(hA->ij) = parcsr;
1489:   T->preallocated = PETSC_TRUE;

1491:   /* set assembled flag */
1492:   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1493:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1494:   if (ishyp) {
1495:     PetscMPIInt myid = 0;

1497:     /* make sure we always have row_starts and col_starts available */
1498:     if (HYPRE_AssumedPartitionCheck()) {
1499:       MPI_Comm_rank(comm,&myid);
1500:     }
1501:     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1502:       PetscLayout map;

1504:       MatGetLayouts(T,NULL,&map);
1505:       PetscLayoutSetUp(map);
1506:       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
1507:     }
1508:     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1509:       PetscLayout map;

1511:       MatGetLayouts(T,&map,NULL);
1512:       PetscLayoutSetUp(map);
1513:       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
1514:     }
1515:     /* do not free the ParCSR pointer, since the caller retains ownership */
1516:     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1517:     *A   = T;
1518:     MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
1519:     MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
1520:   } else if (isaij) {
1521:     if (copymode != PETSC_OWN_POINTER) {
1522:       /* do not free the ParCSR pointer, since it is owned by the caller */
1523:       hA->inner_free = PETSC_FALSE;
1524:       MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);
1525:       MatDestroy(&T);
1526:     } else { /* AIJ return type with PETSC_OWN_POINTER */
1527:       MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);
1528:       *A   = T;
1529:     }
1530:   } else if (isis) {
1531:     MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);
1532:     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1533:     MatDestroy(&T);
1534:   }
1535:   return(0);
1536: }
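/*
   Usage sketch for MatCreateFromParCSR (illustrative only, not taken from the PETSc
   examples). The variable parcsr is assumed to be a hypre_ParCSRMatrix that was
   assembled elsewhere through the hypre API:

     Mat            A;
     PetscErrorCode ierr;

     // PETSC_USE_POINTER: PETSc wraps the hypre data and the caller keeps ownership
     ierr = MatCreateFromParCSR(parcsr,MATHYPRE,PETSC_USE_POINTER,&A);CHKERRQ(ierr);
     // A can now be used like any other Mat, e.g. with KSPSetOperators(ksp,A,A)
     ierr = MatDestroy(&A);CHKERRQ(ierr);      // does not free parcsr in this mode
*/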

1538: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1539: {
1540:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1541:   HYPRE_Int type;

1544:   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1545:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1546:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1547:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1548:   return(0);
1549: }

1551: /*
1552:    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1554:    Not collective

1556:    Input Parameter:
1557: .  A  - the MATHYPRE object

1559:    Output Parameter:
1560: .  parcsr  - the pointer to the hypre_ParCSRMatrix

1562:    Level: intermediate

1564: .seealso: MATHYPRE, PetscCopyMode
1565: */
1566: PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1567: {

1573:   PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));
1574:   return(0);
1575: }
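/*
   Usage sketch for MatHYPREGetParCSR (illustrative only, not taken from the PETSc
   examples). A is assumed to be an assembled MATHYPRE matrix; the returned pointer
   is borrowed and must not be destroyed by the caller:

     hypre_ParCSRMatrix *parcsr;
     PetscInt            M;
     PetscErrorCode      ierr;

     ierr = MatHYPREGetParCSR(A,&parcsr);CHKERRQ(ierr);
     M    = hypre_ParCSRMatrixGlobalNumRows(parcsr);   // direct hypre access
*/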

1577: static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1578: {
1579:   hypre_ParCSRMatrix *parcsr;
1580:   hypre_CSRMatrix    *ha;
1581:   PetscInt           rst;
1582:   PetscErrorCode     ierr;

1585:   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1586:   MatGetOwnershipRange(A,&rst,NULL);
1587:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1588:   if (missing) *missing = PETSC_FALSE;
1589:   if (dd) *dd = -1;
1590:   ha = hypre_ParCSRMatrixDiag(parcsr);
1591:   if (ha) {
1592:     PetscInt  size,i;
1593:     HYPRE_Int *ii,*jj;

1595:     size = hypre_CSRMatrixNumRows(ha);
1596:     ii   = hypre_CSRMatrixI(ha);
1597:     jj   = hypre_CSRMatrixJ(ha);
1598:     for (i = 0; i < size; i++) {
1599:       PetscInt  j;
1600:       PetscBool found = PETSC_FALSE;

1602:       for (j = ii[i]; j < ii[i+1] && !found; j++)
1603:         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

1605:       if (!found) {
1606:         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1607:         if (missing) *missing = PETSC_TRUE;
1608:         if (dd) *dd = i+rst;
1609:         return(0);
1610:       }
1611:     }
1612:     if (!size) {
1613:       PetscInfo(A,"Matrix has no diagonal entries, therefore it is missing the diagonal\n");
1614:       if (missing) *missing = PETSC_TRUE;
1615:       if (dd) *dd = rst;
1616:     }
1617:   } else {
1618:     PetscInfo(A,"Matrix has no diagonal entries, therefore it is missing the diagonal\n");
1619:     if (missing) *missing = PETSC_TRUE;
1620:     if (dd) *dd = rst;
1621:   }
1622:   return(0);
1623: }

1625: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1626: {
1627:   hypre_ParCSRMatrix *parcsr;
1628:   hypre_CSRMatrix    *ha;
1629:   PetscErrorCode     ierr;
1630:   HYPRE_Complex      hs;

1633:   PetscHYPREScalarCast(s,&hs);
1634:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1635:   /* diagonal part */
1636:   ha = hypre_ParCSRMatrixDiag(parcsr);
1637:   if (ha) {
1638:     PetscInt      size,i;
1639:     HYPRE_Int     *ii;
1640:     HYPRE_Complex *a;

1642:     size = hypre_CSRMatrixNumRows(ha);
1643:     a    = hypre_CSRMatrixData(ha);
1644:     ii   = hypre_CSRMatrixI(ha);
1645:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1646:   }
1647:   /* offdiagonal part */
1648:   ha = hypre_ParCSRMatrixOffd(parcsr);
1649:   if (ha) {
1650:     PetscInt      size,i;
1651:     HYPRE_Int     *ii;
1652:     HYPRE_Complex *a;

1654:     size = hypre_CSRMatrixNumRows(ha);
1655:     a    = hypre_CSRMatrixData(ha);
1656:     ii   = hypre_CSRMatrixI(ha);
1657:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1658:   }
1659:   return(0);
1660: }

1662: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1663: {
1664:   hypre_ParCSRMatrix *parcsr;
1665:   HYPRE_Int          *lrows;
1666:   PetscInt           rst,ren,i;
1667:   PetscErrorCode     ierr;

1670:   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1671:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1672:   PetscMalloc1(numRows,&lrows);
1673:   MatGetOwnershipRange(A,&rst,&ren);
1674:   for (i=0;i<numRows;i++) {
1675:     if (rows[i] < rst || rows[i] >= ren)
1676:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1677:     lrows[i] = rows[i] - rst;
1678:   }
1679:   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1680:   PetscFree(lrows);
1681:   return(0);
1682: }

1684: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1685: {
1686:   PetscErrorCode      ierr;

1689:   if (ha) {
1690:     HYPRE_Int     *ii, size;
1691:     HYPRE_Complex *a;

1693:     size = hypre_CSRMatrixNumRows(ha);
1694:     a    = hypre_CSRMatrixData(ha);
1695:     ii   = hypre_CSRMatrixI(ha);

1697:     if (a) {PetscArrayzero(a,ii[size]);}
1698:   }
1699:   return(0);
1700: }

1702: PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1703: {
1704:   hypre_ParCSRMatrix  *parcsr;
1705:   PetscErrorCode      ierr;

1708:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1709:   /* diagonal part */
1710:   MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));
1711:   /* off-diagonal part */
1712:   MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));
1713:   return(0);
1714: }

1716: static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1717: {
1718:   PetscInt        ii;
1719:   HYPRE_Int       *i, *j;
1720:   HYPRE_Complex   *a;

1723:   if (!hA) return(0);

1725:   i = hypre_CSRMatrixI(hA);
1726:   j = hypre_CSRMatrixJ(hA);
1727:   a = hypre_CSRMatrixData(hA);

1729:   for (ii = 0; ii < N; ii++) {
1730:     HYPRE_Int jj, ibeg, iend, irow;

1732:     irow = rows[ii];
1733:     ibeg = i[irow];
1734:     iend = i[irow+1];
1735:     for (jj = ibeg; jj < iend; jj++)
1736:       if (j[jj] == irow) a[jj] = diag;
1737:       else a[jj] = 0.0;
1738:   }
1739:   return(0);
1740: }

1742: static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1743: {
1744:   hypre_ParCSRMatrix  *parcsr;
1745:   PetscInt            *lrows,len;
1746:   HYPRE_Complex       hdiag;
1747:   PetscErrorCode      ierr;

1750:   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Modifying the solution and the right-hand side is not supported");
1751:   PetscHYPREScalarCast(diag,&hdiag);
1752:   /* retrieve the internal matrix */
1753:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1754:   /* get locally owned rows */
1755:   MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);
1756:   /* zero diagonal part */
1757:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);
1758:   /* zero off-diagonal part */
1759:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);

1761:   PetscFree(lrows);
1762:   return(0);
1763: }

1765: static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1766: {

1770:   if (mat->nooffprocentries) return(0);

1772:   MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);
1773:   return(0);
1774: }

1776: static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1777: {
1778:   hypre_ParCSRMatrix  *parcsr;
1779:   HYPRE_Int           hnz;
1780:   PetscErrorCode      ierr;

1783:   /* retrieve the internal matrix */
1784:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1785:   /* call HYPRE API */
1786:   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1787:   if (nz) *nz = (PetscInt)hnz;
1788:   return(0);
1789: }

1791: static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1792: {
1793:   hypre_ParCSRMatrix  *parcsr;
1794:   HYPRE_Int           hnz;
1795:   PetscErrorCode      ierr;

1798:   /* retrieve the internal matrix */
1799:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1800:   /* call HYPRE API */
1801:   hnz = nz ? (HYPRE_Int)(*nz) : 0;
1802:   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1803:   return(0);
1804: }

1806: static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1807: {
1808:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1809:   PetscInt  i;

1812:   if (!m || !n) return(0);
1813:   /* Ignore negative row indices;
1814:    * negative column indices should be automatically ignored by hypre
1815:    */
1816:   for (i=0; i<m; i++) {
1817:     if (idxm[i] >= 0) {
1818:       HYPRE_Int hn = (HYPRE_Int)n;
1819:       PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
1820:     }
1821:   }
1822:   return(0);
1823: }

1825: static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1826: {
1827:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

1830:   switch (op) {
1831:   case MAT_NO_OFF_PROC_ENTRIES:
1832:     if (flg) {
1833:       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1834:     }
1835:     break;
1836:   default:
1837:     break;
1838:   }
1839:   return(0);
1840: }

1842: static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1843: {
1844:   hypre_ParCSRMatrix *parcsr;
1845:   PetscErrorCode     ierr;
1846:   Mat                B;
1847:   PetscViewerFormat  format;
1848:   PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;

1851:   PetscViewerGetFormat(view,&format);
1852:   if (format != PETSC_VIEWER_NATIVE) {
1853:     MatHYPREGetParCSR_HYPRE(A,&parcsr);
1854:     MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);
1855:     MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);
1856:     if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");
1857:     (*mview)(B,view);
1858:     MatDestroy(&B);
1859:   } else {
1860:     Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
1861:     PetscMPIInt size;
1862:     PetscBool   isascii;
1863:     const char *filename;

1865:     /* HYPRE uses only text files */
1866:     PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);
1867:     if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1868:     PetscViewerFileGetName(view,&filename);
1869:     PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1870:     MPI_Comm_size(hA->comm,&size);
1871:     if (size > 1) {
1872:       PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);
1873:     } else {
1874:       PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);
1875:     }
1876:   }
1877:   return(0);
1878: }

1880: static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1881: {
1882:   hypre_ParCSRMatrix *parcsr;
1883:   PetscErrorCode     ierr;
1884:   PetscCopyMode      cpmode;

1887:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1888:   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1889:     parcsr = hypre_ParCSRMatrixClone(parcsr,0);
1890:     cpmode = PETSC_OWN_POINTER;
1891:   } else {
1892:     cpmode = PETSC_COPY_VALUES;
1893:   }
1894:   MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);
1895:   return(0);
1896: }

1898: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
1899: {
1900:   hypre_ParCSRMatrix *acsr,*bcsr;
1901:   PetscErrorCode     ierr;

1904:   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
1905:     MatHYPREGetParCSR_HYPRE(A,&acsr);
1906:     MatHYPREGetParCSR_HYPRE(B,&bcsr);
1907:     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
1908:     MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
1909:     MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
1910:   } else {
1911:     MatCopy_Basic(A,B,str);
1912:   }
1913:   return(0);
1914: }

1916: static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
1917: {
1918:   hypre_ParCSRMatrix *parcsr;
1919:   hypre_CSRMatrix    *dmat;
1920:   HYPRE_Complex      *a;
1921:   HYPRE_Complex      *data = NULL;
1922:   HYPRE_Int          *diag = NULL;
1923:   PetscInt           i;
1924:   PetscBool          cong;
1925:   PetscErrorCode     ierr;

1928:   MatHasCongruentLayouts(A,&cong);
1929:   if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
1930: #if defined(PETSC_USE_DEBUG)
1931:   {
1932:     PetscBool miss;
1933:     MatMissingDiagonal(A,&miss,NULL);
1934:     if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
1935:   }
1936: #endif
1937:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1938:   dmat = hypre_ParCSRMatrixDiag(parcsr);
1939:   if (dmat) {
1940:     /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
1941:     VecGetArray(d,(PetscScalar**)&a);
1942:     diag = hypre_CSRMatrixI(dmat);
1943:     data = hypre_CSRMatrixData(dmat);
1944:     for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
1945:     VecRestoreArray(d,(PetscScalar**)&a);
1946:   }
1947:   return(0);
1948: }

1950:  #include <petscblaslapack.h>

1952: static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
1953: {

1957:   if (str == SAME_NONZERO_PATTERN) {
1958:     hypre_ParCSRMatrix *x,*y;
1959:     hypre_CSRMatrix    *xloc,*yloc;
1960:     PetscInt           xnnz,ynnz;
1961:     HYPRE_Complex      *xarr,*yarr;
1962:     PetscBLASInt       one=1,bnz;

1964:     MatHYPREGetParCSR(Y,&y);
1965:     MatHYPREGetParCSR(X,&x);

1967:     /* diagonal block */
1968:     xloc = hypre_ParCSRMatrixDiag(x);
1969:     yloc = hypre_ParCSRMatrixDiag(y);
1970:     xnnz = 0;
1971:     ynnz = 0;
1972:     xarr = NULL;
1973:     yarr = NULL;
1974:     if (xloc) {
1975:       xarr = hypre_CSRMatrixData(xloc);
1976:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1977:     }
1978:     if (yloc) {
1979:       yarr = hypre_CSRMatrixData(yloc);
1980:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1981:     }
1982:     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
1983:     PetscBLASIntCast(xnnz,&bnz);
1984:     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));

1986:     /* off-diagonal block */
1987:     xloc = hypre_ParCSRMatrixOffd(x);
1988:     yloc = hypre_ParCSRMatrixOffd(y);
1989:     xnnz = 0;
1990:     ynnz = 0;
1991:     xarr = NULL;
1992:     yarr = NULL;
1993:     if (xloc) {
1994:       xarr = hypre_CSRMatrixData(xloc);
1995:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1996:     }
1997:     if (yloc) {
1998:       yarr = hypre_CSRMatrixData(yloc);
1999:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2000:     }
2001:     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
2002:     PetscBLASIntCast(xnnz,&bnz);
2003:     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2004:   } else if (str == SUBSET_NONZERO_PATTERN) {
2005:     MatAXPY_Basic(Y,a,X,str);
2006:   } else {
2007:     Mat B;

2009:     MatAXPY_Basic_Preallocate(Y,X,&B);
2010:     MatAXPY_BasicWithPreallocation(B,Y,a,X,str);
2011:     MatHeaderReplace(Y,&B);
2012:   }
2013:   return(0);
2014: }

2016: /*MC
2017:    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2018:           based on the hypre IJ interface.

2020:    Level: intermediate

2022: .seealso: MatCreate()
2023: M*/
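/*
   Usage sketch (illustrative only, not taken from the PETSc examples): an assembled
   AIJ matrix B can be converted to MATHYPRE through the generic MatConvert()
   interface:

     Mat            Bh;
     PetscErrorCode ierr;

     ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,&Bh);CHKERRQ(ierr);
     // Bh stores its data in a HYPRE_IJMatrix and supports the operations
     // installed by MatCreate_HYPRE() below
     ierr = MatDestroy(&Bh);CHKERRQ(ierr);
*/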

2025: PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
2026: {
2027:   Mat_HYPRE      *hB;

2031:   PetscNewLog(B,&hB);
2032:   hB->inner_free = PETSC_TRUE;
2033:   hB->available  = PETSC_TRUE;
2034:   hB->size       = 0;
2035:   hB->array      = NULL;

2037:   B->data       = (void*)hB;
2038:   B->rmap->bs   = 1;
2039:   B->assembled  = PETSC_FALSE;

2041:   PetscMemzero(B->ops,sizeof(struct _MatOps));
2042:   B->ops->mult             = MatMult_HYPRE;
2043:   B->ops->multtranspose    = MatMultTranspose_HYPRE;
2044:   B->ops->multadd          = MatMultAdd_HYPRE;
2045:   B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
2046:   B->ops->setup            = MatSetUp_HYPRE;
2047:   B->ops->destroy          = MatDestroy_HYPRE;
2048:   B->ops->assemblyend      = MatAssemblyEnd_HYPRE;
2049:   B->ops->assemblybegin    = MatAssemblyBegin_HYPRE;
2050:   B->ops->ptap             = MatPtAP_HYPRE_HYPRE;
2051:   B->ops->matmult          = MatMatMult_HYPRE_HYPRE;
2052:   B->ops->setvalues        = MatSetValues_HYPRE;
2053:   B->ops->missingdiagonal  = MatMissingDiagonal_HYPRE;
2054:   B->ops->scale            = MatScale_HYPRE;
2055:   B->ops->zerorowscolumns  = MatZeroRowsColumns_HYPRE;
2056:   B->ops->zeroentries      = MatZeroEntries_HYPRE;
2057:   B->ops->zerorows         = MatZeroRows_HYPRE;
2058:   B->ops->getrow           = MatGetRow_HYPRE;
2059:   B->ops->restorerow       = MatRestoreRow_HYPRE;
2060:   B->ops->getvalues        = MatGetValues_HYPRE;
2061:   B->ops->setoption        = MatSetOption_HYPRE;
2062:   B->ops->duplicate        = MatDuplicate_HYPRE;
2063:   B->ops->copy             = MatCopy_HYPRE;
2064:   B->ops->view             = MatView_HYPRE;
2065:   B->ops->getdiagonal      = MatGetDiagonal_HYPRE;
2066:   B->ops->axpy             = MatAXPY_HYPRE;

2068:   /* build cache (stash) for off-process entries formed during MatSetValues */
2069:   MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);

2071:   MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);
2072:   PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);
2073:   PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);
2074:   PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);
2075:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);
2076:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);
2077:   PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);
2078:   PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);
2079:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);
2080:   return(0);
2081: }

2083: static PetscErrorCode hypre_array_destroy(void *ptr)
2084: {
2086:    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2087:    return(0);
2088: }