Actual source code: mhypre.c


  2: /*
  3:     Creates hypre ijmatrix from PETSc matrix
  4: */

  6: #include <petscpkg_version.h>
  7: #include <petsc/private/petschypre.h>
  8: #include <petscmathypre.h>
  9: #include <petsc/private/matimpl.h>
 10: #include <../src/mat/impls/hypre/mhypre.h>
 11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
 12: #include <../src/vec/vec/impls/hypre/vhyp.h>
 13: #include <HYPRE.h>
 14: #include <HYPRE_utilities.h>
 15: #include <_hypre_parcsr_ls.h>
 16: #include <_hypre_sstruct_ls.h>

 18: #if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
 19: #define  hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
 20: #endif

 22: static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
 23: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
 24: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
 25: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
 26: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
 27: static PetscErrorCode hypre_array_destroy(void*);
 28: static PetscErrorCode MatSetValues_HYPRE(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);

 30: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 31: {
 33:   PetscInt       i,n_d,n_o;
 34:   const PetscInt *ia_d,*ia_o;
 35:   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
 36:   HYPRE_Int      *nnz_d=NULL,*nnz_o=NULL;

 39:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 40:     MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
 41:     if (done_d) {
 42:       PetscMalloc1(n_d,&nnz_d);
 43:       for (i=0; i<n_d; i++) {
 44:         nnz_d[i] = ia_d[i+1] - ia_d[i];
 45:       }
 46:     }
 47:     MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
 48:   }
 49:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 50:     MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 51:     if (done_o) {
 52:       PetscMalloc1(n_o,&nnz_o);
 53:       for (i=0; i<n_o; i++) {
 54:         nnz_o[i] = ia_o[i+1] - ia_o[i];
 55:       }
 56:     }
 57:     MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 58:   }
 59:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 60:     if (!done_o) { /* only diagonal part */
 61:       PetscCalloc1(n_d,&nnz_o);
 62:     }
 63: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
 64:     { /* If we don't do this, the columns of the matrix will be all zeros! */
 65:       hypre_AuxParCSRMatrix *aux_matrix;
 66:       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
 67:       hypre_AuxParCSRMatrixDestroy(aux_matrix);
 68:       hypre_IJMatrixTranslator(ij) = NULL;
 69:       PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
 70:       /* it seems they partially fixed it in 2.19.0 */
 71: #if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
 72:       aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
 73:       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
 74: #endif
 75:     }
 76: #else
 77:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
 78: #endif
 79:     PetscFree(nnz_d);
 80:     PetscFree(nnz_o);
 81:   }
 82:   return(0);
 83: }

 85: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
 86: {
 88:   PetscInt       rstart,rend,cstart,cend;

 91:   PetscLayoutSetUp(A->rmap);
 92:   PetscLayoutSetUp(A->cmap);
 93:   rstart = A->rmap->rstart;
 94:   rend   = A->rmap->rend;
 95:   cstart = A->cmap->rstart;
 96:   cend   = A->cmap->rend;
 97:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
 98:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
 99:   {
100:     PetscBool      same;
101:     Mat            A_d,A_o;
102:     const PetscInt *colmap;
103:     PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);
104:     if (same) {
105:       MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
106:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
107:       return(0);
108:     }
109:     PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
110:     if (same) {
111:       MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
112:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
113:       return(0);
114:     }
115:     PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);
116:     if (same) {
117:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
118:       return(0);
119:     }
120:     PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
121:     if (same) {
122:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
123:       return(0);
124:     }
125:   }
126:   return(0);
127: }

/*
   Copies the values of A into the hypre IJ matrix ij. Uses a fast path that
   writes directly into the hypre CSR arrays for square MPIAIJ and for SeqAIJ;
   all other types fall back to a generic row-by-row MatGetRow copy.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode    ierr;
  PetscInt          i,rstart,rend,ncols,nr,nc;
  const PetscScalar *values;
  const PetscInt    *cols;
  PetscBool         flg;

  /* hypre >= 2.19 takes an explicit memory location for initialization */
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
#else
  PetscStackCallStandard(HYPRE_IJMatrixInitialize_v2,(ij,HYPRE_MEMORY_HOST));
#endif
  PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
  MatGetSize(A,&nr,&nc);
  /* fast MPIAIJ path only for square matrices (it relies on MatGetOwnershipRange
     for the column start, which is only valid in the square case) */
  if (flg && nr == nc) {
    MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
    return(0);
  }
  PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
  if (flg) {
    MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
    return(0);
  }

  /* generic fallback: set values one locally owned row at a time */
  MatGetOwnershipRange(A,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatGetRow(A,i,&ncols,&cols,&values);
    if (ncols) {
      /* note: this inner nc (HYPRE_Int) shadows the outer PetscInt nc above */
      HYPRE_Int nc = (HYPRE_Int)ncols;

      /* guard against HYPRE_Int being narrower than PetscInt */
      if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
      PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
    }
    MatRestoreRow(A,i,&ncols,&cols,&values);
  }
  return(0);
}

/*
   Fast copy of a SeqAIJ matrix into an already-initialized hypre IJ matrix:
   copies the CSR arrays (i, j, data) of A straight into the hypre diag
   CSRMatrix, bypassing HYPRE_IJMatrixSetValues entirely.
   Requires ij to be of HYPRE_PARCSR type with a matching preallocation.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  /* PetscInt and HYPRE_Int may have different widths; only memcpy when equal */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  const PetscScalar     *pa;

  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    /* same integer width: bulk copy of row offsets and column indices */
    PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);
    PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);
  } else {
    /* widths differ: convert entry by entry */
    PetscInt i;

    for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i=0;i<pdiag->nz;i++)      hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  MatSeqAIJGetArrayRead(A,&pa);
  PetscArraycpy(hdiag->data,pa,pdiag->nz);
  MatSeqAIJRestoreArrayRead(A,&pa);

  /* tell hypre the translator's auxiliary matrix is not needed since we
     filled the ParCSR arrays directly */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  return(0);
}

/*
   Fast copy of a (square) MPIAIJ matrix into an already-initialized hypre IJ
   matrix: copies the diagonal and off-diagonal SeqAIJ CSR arrays directly
   into the hypre diag/offd CSRMatrix structures. Off-diagonal column indices
   are mapped back to global numbering through garray, as hypre expects.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             *hjj,type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;
  /* PetscInt and HYPRE_Int may have different widths; only memcpy when equal */
  PetscBool             sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  const PetscScalar     *pa;

  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  MatGetOwnershipRange(A,&cstart,NULL);

  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  hjj = hdiag->j;
  pjj = pdiag->j;
  /* in hypre >= 2.16 the diag part stays in local numbering; before that it is global */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  MatSeqAIJGetArrayRead(pA->A,&pa);
  PetscArraycpy(hdiag->data,pa,pdiag->nz);
  MatSeqAIJRestoreArrayRead(pA->A,&pa);
  if (sameint) {
    PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);
  } else {
    for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
#if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
  /* hypre >= 2.16 stores global offd indices in the separate big_j array */
  PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
  jj  = (PetscInt*) hoffd->big_j;
#else
  jj  = (PetscInt*) hoffd->j;
#endif
  pjj = poffd->j;
  /* garray maps local off-diagonal column indices to global column numbers */
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];

  MatSeqAIJGetArrayRead(pA->B,&pa);
  PetscArraycpy(hoffd->data,pa,poffd->nz);
  MatSeqAIJRestoreArrayRead(pA->B,&pa);

  /* tell hypre the translator's auxiliary matrix is not needed since we
     filled the ParCSR arrays directly */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  return(0);
}

/*
   Converts a MATHYPRE matrix A into a MATIS matrix B.
   The hypre diag and offd CSR parts are merged row-by-row into a single local
   SeqAIJ matrix whose columns are numbered [0,dc) for diag and [dc,dc+oc) for
   offd; the column local-to-global map carries the true global numbering.
   Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  HYPRE_Complex          *hdd,*hod,*aa;
  PetscScalar            *data;
  HYPRE_BigInt           *col_map_offd;
  HYPRE_Int              *hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, starts, and raw CSR arrays out of the hypre ParCSR matrix */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total (diag + offd) nonzeros */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ISCreateStride(comm,dr,str,1,&is);
    ISLocalToGlobalMappingCreateIS(is,&rl2g);
    ISDestroy(&is);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    /* columns: first dc locals map to the contiguous diag range starting at stc,
       the remaining oc map through hypre's offd column map */
    PetscMalloc1(dc+oc,&aux);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
    ISLocalToGlobalMappingCreateIS(is,&cl2g);
    ISDestroy(&is);
    /* create MATIS object */
    MatCreate(comm,B);
    MatSetSizes(*B,dr,dc,M,N);
    MatSetType(*B,MATIS);
    MatSetLocalToGlobalMapping(*B,rl2g,cl2g);
    ISLocalToGlobalMappingDestroy(&rl2g);
    ISLocalToGlobalMappingDestroy(&cl2g);

    /* allocate CSR for local matrix */
    PetscMalloc1(dr+1,&iptr);
    PetscMalloc1(nnz,&jptr);
    PetscMalloc1(nnz,&data);
  } else {
    /* reuse path: write into the existing local matrix's CSR arrays after
       validating that they are big enough */
    PetscInt  nr;
    PetscBool done;
    MatISGetLocalMat(*B,&lA);
    MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    MatSeqAIJGetArray(lA,&data);
  }
  /* merge local matrices: walk diag and offd CSR row-by-row in lockstep,
     writing a single merged row (offd columns shifted by dc), then sort
     each merged row's column indices together with its values */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = (PetscScalar*)aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);
  }
  /* pad row pointers for any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);
    MatISSetLocalMat(*B,lA);
    /* hack SeqAIJ: transfer ownership of the arrays we allocated to lA */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    MatDestroy(&lA);
  }
  MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,B);
  }
  return(0);
}

/*
   Converts an AIJ matrix A into a MATHYPRE matrix B.
   Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX (destroys and recreates the
   underlying hypre IJ matrix) and MAT_INPLACE_MATRIX (replaces A's header).
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  Mat            M = NULL;
  Mat_HYPRE      *hB;
  MPI_Comm       comm = PetscObjectComm((PetscObject)A);

  if (reuse == MAT_REUSE_MATRIX) {
    /* always destroy the old matrix and create a new memory;
       hope this does not churn the memory too much. The problem
       is I do not know if it is possible to put the matrix back to
       its initial state so that we can directly copy the values
       the second time through. */
    hB = (Mat_HYPRE*)((*B)->data);
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
  } else {
    MatCreate(comm,&M);
    MatSetType(M,MATHYPRE);
    MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
    hB   = (Mat_HYPRE*)(M->data);
    /* for INPLACE, M is folded into A below via MatHeaderReplace */
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  }
  MatSetOption(*B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
  MatSetOption(*B,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);
  MatHYPRE_CreateFromMat(A,hB);
  MatHYPRE_IJMatrixCopy(A,hB->ij);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&M);
  }
  (*B)->preallocated = PETSC_TRUE;
  MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
  return(0);
}

/*
   Converts a MATHYPRE matrix A into a (Seq/MPI)AIJ matrix B.
   The hypre diag/offd CSR arrays are copied (or, for MAT_INPLACE_MATRIX with
   matching integer sizes, ownership is transferred) into PETSc AIJ storage,
   sorting the column indices of each row since hypre does not guarantee the
   ordering PETSc expects. Host memory only (device path errors out).
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  /* whether PetscInt and HYPRE_Int have the same width; governs whether CSR
     index arrays can be copied/adopted directly or must be converted */
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;

  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);
    PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) SETERRQ(comm,PETSC_ERR_SUP,"Not yet implemented");
#endif
  MPI_Comm_size(comm,&size);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain destination arrays for the diagonal part:
     INITIAL -> fresh allocations; REUSE -> the existing AIJ arrays of *B;
     INPLACE -> adopt hypre's own arrays when integer sizes match */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscMalloc1(m+1,&dii);
    PetscMalloc1(dnnz,&djj);
    PetscMalloc1(dnnz,&da);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      MatSeqAIJGetArray(b->A,&da);
    } else {
      MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      MatSeqAIJGetArray(*B,&da);
    }
  } else { /* MAT_INPLACE_MATRIX */
    if (!sameint) {
      /* integer widths differ: cannot adopt hypre's index arrays, convert into new ones */
      PetscMalloc1(m+1,&dii);
      PetscMalloc1(dnnz,&djj);
    } else {
      /* take the hypre arrays as-is; ownership is transferred below */
      dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
      djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    }
    da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
  }

  /* fill the diagonal index arrays (skip row pointers on REUSE, they are fixed) */
  if (!sameint) {
    if (reuse != MAT_REUSE_MATRIX) { for (i=0;i<m+1;i++)  dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]); }
    for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
  } else {
    if (reuse != MAT_REUSE_MATRIX) { PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1); }
    PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);
  }
  PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);
  /* sort each row's column indices (and values with them), as AIJ requires */
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    PetscSortIntWithScalarArray(nc,iptr,aptr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_BigInt *coffd;
    HYPRE_Int    *offdj;

    /* same three-way allocation scheme for the off-diagonal part */
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscMalloc1(m+1,&oii);
      PetscMalloc1(onnz,&ojj);
      PetscMalloc1(onnz,&oa);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      MatSeqAIJGetArray(b->B,&oa);
    } else { /* MAT_INPLACE_MATRIX */
      if (!sameint) {
        PetscMalloc1(m+1,&oii);
        PetscMalloc1(onnz,&ojj);
      } else {
        oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
        ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      }
      oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
    }
    if (reuse != MAT_REUSE_MATRIX) {
      if (!sameint) {
        for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
      } else {
        PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);
      }
    }
    PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);

    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    /* we only need the permutation to be computed properly, I don't know if HYPRE
       messes up with the ordering. Just in case, allocate some memory and free it
       later */
    if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   mnz;

      /* scratch buffer large enough for any single row of global column indices */
      MatSeqAIJGetMaxRowNonzeros(b->B,&mnz);
      PetscMalloc1(mnz,&ojj);
    } else for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]]; /* local offd index -> global column */
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       if (reuse == MAT_REUSE_MATRIX) {
         PetscInt j;

         /* on REUSE, build the global indices for this row in the scratch buffer;
            only the sort permutation applied to the values matters */
         iptr = ojj;
         for (j=0; j<nc; j++) iptr[j] = coffd[offdj[oii[i] + j]];
       }
       PetscSortIntWithScalarArray(nc,iptr,aptr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_REUSE_MATRIX) { PetscFree(ojj); }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);
      /* hack MPIAIJ: transfer ownership of all six arrays to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);

        d->free_ij = PETSC_TRUE;
        o->free_ij = PETSC_TRUE;
      }
      /* data arrays are adopted in either case */
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      MatHeaderReplace(A,&T);
    }
  } else {
    /* sequential: no off-diagonal part */
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;

      MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);
      /* hack SeqAIJ: transfer ownership of the arrays to the new matrix */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);
      if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but not a */
        Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);

        b->free_ij = PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      MatHeaderReplace(A,&T);
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns; attach them to *B via containers with a
     hypre-aware destroy callback */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscInt nh;
    void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
    const char *names[6] = {"_hypre_csr_da",
                            "_hypre_csr_oa",
                            "_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj"};
    /* when integer widths differ, only the two data arrays came from hypre */
    nh = sameint ? 6 : 2;
    for (i=0; i<nh; i++) {
      PetscContainer c;

      PetscContainerCreate(comm,&c);
      PetscContainerSetPointer(c,ptrs[i]);
      PetscContainerSetUserDestroy(c,hypre_array_destroy);
      PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);
      PetscContainerDestroy(&c);
    }
  }
  return(0);
}

/*
   Wraps an existing (Seq/MPI)AIJ matrix A in a temporary hypre ParCSR matrix
   *hA without copying the numerical values: the hypre CSR structures are made
   to point at (or, if integer widths differ, at converted copies of) A's own
   arrays. Must be paired with MatAIJRestoreParCSR_Private, which detaches the
   borrowed pointers before destroying the temporary.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag,*hoffd;
  Mat_SeqAIJ         *diag,*offd;
  PetscInt           *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ismpiaij,isseqaij;
  PetscBool          sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode     ierr;
  HYPRE_Int          *hdi = NULL,*hdj = NULL,*hoi = NULL,*hoj = NULL;
  PetscInt           *pdi,*pdj,*poi,*poj;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool          iscuda = PETSC_FALSE;
#endif

  PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
  PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
  if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

    diag = (Mat_SeqAIJ*)a->A->data;
    offd = (Mat_SeqAIJ*)a->B->data;
    /* device path: borrow the (HYPRE_Int-sized) CSR indices straight from cusparse */
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE) && defined(HYPRE_USING_CUDA)
    PetscObjectTypeCompare((PetscObject)A,MATMPIAIJCUSPARSE,&iscuda);
    if (iscuda && !A->boundtocpu) {
      sameint = PETSC_TRUE;
      MatSeqAIJCUSPARSEGetIJ(a->A,PETSC_FALSE,(const HYPRE_Int**)&hdi,(const HYPRE_Int**)&hdj);
      MatSeqAIJCUSPARSEGetIJ(a->B,PETSC_FALSE,(const HYPRE_Int**)&hoi,(const HYPRE_Int**)&hoj);
    } else {
#else
    {
#endif
      /* host path: borrow PETSc's CSR arrays; reuse directly only when the
         integer widths match, otherwise converted copies are made below */
      pdi = diag->i;
      pdj = diag->j;
      poi = offd->i;
      poj = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int*)pdi;
        hdj = (HYPRE_Int*)pdj;
        hoi = (HYPRE_Int*)poi;
        hoj = (HYPRE_Int*)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    /* sequential: only a diagonal part */
    diag = (Mat_SeqAIJ*)A->data;
    offd = NULL;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE)
    PetscObjectTypeCompare((PetscObject)A,MATSEQAIJCUSPARSE,&iscuda);
    if (iscuda && !A->boundtocpu) {
      sameint = PETSC_TRUE;
      MatSeqAIJCUSPARSEGetIJ(A,PETSC_FALSE,(const HYPRE_Int**)&hdi,(const HYPRE_Int**)&hdj);
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int*)pdi;
        hdj = (HYPRE_Int*)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* assumed-partition mode: hypre wants only this rank's [start,end) entries */
    MPI_Comm_rank(comm,&myid);
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* the starts arrays belong to PETSc's layouts, don't let hypre free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
  hypre_ParCSRMatrixSetColStartsOwner(tA,0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscMalloc2(A->rmap->n+1,&hdi,dnnz,&hdj);
    for (i = 0; i < A->rmap->n+1; i++) hdi[i] = (HYPRE_Int)(pdi[i]);
    for (i = 0; i < dnnz; i++)         hdj[i] = (HYPRE_Int)(pdj[i]);
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex*)diag->a; /* values shared, not copied */
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag,0); /* hypre must not free PETSc's data */

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscMalloc2(A->rmap->n+1,&hoi,onnz,&hoj);
      for (i = 0; i < A->rmap->n+1; i++) hoi[i] = (HYPRE_Int)(poi[i]);
      for (i = 0; i < onnz; i++)         hoj[i] = (HYPRE_Int)(poj[i]);
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex*)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd,0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackCallStandard(hypre_ParCSRMatrixInitialize_v2,(tA,iscuda ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST));
#else
#if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
  PetscStackCallStandard(hypre_ParCSRMatrixInitialize,(tA));
#else
  PetscStackCallStandard(hypre_ParCSRMatrixInitialize_v2,(tA,HYPRE_MEMORY_HOST));
#endif
#endif
  /* replace the col map Initialize allocated with PETSc's garray (shared, not copied) */
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA),HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscStackCallStandard(hypre_MatvecCommPkgCreate,(tA));
  *hA = tA;
  return(0);
}

/* Companion of MatAIJGetParCSR_Private(): frees the temporary HYPRE_Int CSR
   index arrays PETSc allocated (only when sizeof(PetscInt) != sizeof(HYPRE_Int)),
   detaches the PETSc-owned data pointers, and destroys the wrapping
   hypre_ParCSRMatrix without touching the arrays still owned by the Mat A. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag,*hoffd;
  PetscBool       ismpiaij,sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscErrorCode  ierr;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool       iscuda = PETSC_FALSE;
#endif

  PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscObjectTypeCompareAny((PetscObject)A,&iscuda,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");
  if (iscuda) sameint = PETSC_TRUE; /* the device path never duplicates the CSR index arrays */
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi,*hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscFree2(hi,hj);
    if (ismpiaij) { /* the off-diagonal block only exists for parallel AIJ */
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscFree2(hi,hj);
    }
  }
  /* NULL out all pointers that still belong to the PETSc matrix so that
     hypre_ParCSRMatrixDestroy() below does not free them */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL; /* garray is owned by the PETSc Mat as well */
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  return(0);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts,R_owns_row_starts;
#endif

  /* remember the ownership flags so they can be restored after hypre's RAP
     (which steals the starts arrays, see comment below) */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPush("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR,hA,hP);
  PetscStackPop;
#else
  PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
  PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
#endif
  return(0);
}

/* Numeric phase of C = P^T*A*P for AIJ matrices, computed through hypre's RAP */
static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *hA,*hP,*hPtAP = NULL;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product; /* saved: MatHeaderMerge() below may clobber it */

  /* wrap the AIJ matrices as hypre ParCSR objects */
  MatAIJGetParCSR_Private(A,&hA);
  MatAIJGetParCSR_Private(P,&hP);
  MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);
  MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);

  /* move the temporary result B into C, then restore the product descriptor */
  MatHeaderMerge(C,&B);
  C->product = product;

  /* release the ParCSR wrappers (A and P keep their data) */
  MatAIJRestoreParCSR_Private(A,&hA);
  MatAIJRestoreParCSR_Private(P,&hP);
  return(0);
}

/* Symbolic phase of C = P^T*A*P with hypre: no symbolic work is performed,
   only the output type and the numeric callbacks are set */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat C)
{

  MatSetType(C,MATAIJ);
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  return(0);
}

/* Numeric phase of C = P^T*A*P with A of type AIJ and P of type MATHYPRE;
   the output type of C was decided in MatProductSetFromOptions_HYPRE_PtAP() */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr = NULL;
  HYPRE_Int          type;
  MPI_Comm           comm = PetscObjectComm((PetscObject)A);
  PetscBool          ishypre;
  PetscErrorCode     ierr;

  /* validate P and extract its underlying ParCSR object */
  PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

  /* wrap A as ParCSR, run RAP, release the wrapper */
  MatAIJGetParCSR_Private(A,&hA);
  MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);
  MatAIJRestoreParCSR_Private(A,&hA);

  /* create temporary matrix and merge to C */
  MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);
  MatHeaderMerge(C,&B);
  return(0);
}

/* Numeric phase of C = P^T*A*P with both A and P of type MATHYPRE */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
{
  Mat                B;
  hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr = NULL;
  Mat_HYPRE          *hA,*hP;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;

  /* both operands must be MATHYPRE wrapping HYPRE_PARCSR objects */
  PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
  PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hP = (Mat_HYPRE*)P->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
  /* run hypre's RAP and move the result into C */
  MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);
  MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);
  MatHeaderMerge(C,&B);
  return(0);
}

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device builds use the device-capable kernel */
  PetscStackPush("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA,hB);
#else
  PetscStackPush("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA,hB);
#endif
  PetscStackPop;
  return(0);
}

/* Numeric phase of C = A*B for AIJ matrices, computed through hypre's matmat kernel */
static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
  PetscErrorCode     ierr;
  Mat_Product        *product=C->product; /* saved: MatHeaderMerge() below may clobber it */

  /* wrap the AIJ matrices as hypre ParCSR objects */
  MatAIJGetParCSR_Private(A,&hA);
  MatAIJGetParCSR_Private(B,&hB);
  MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);
  MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);

  /* move the temporary result D into C, then restore the product descriptor */
  MatHeaderMerge(C,&D);
  C->product = product;

  /* release the ParCSR wrappers (A and B keep their data) */
  MatAIJRestoreParCSR_Private(A,&hA);
  MatAIJRestoreParCSR_Private(B,&hB);
  return(0);
}

/* Symbolic phase of C = A*B with hypre: no symbolic work is performed,
   only the output type and the numeric callbacks are set */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat C)
{

  MatSetType(C,MATAIJ);
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  return(0);
}

/* Numeric phase of C = A*B with both A and B of type MATHYPRE */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
{
  Mat                D;
  hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
  Mat_HYPRE          *hA,*hB;
  PetscBool          ishypre;
  HYPRE_Int          type;
  PetscErrorCode     ierr;
  Mat_Product        *product;

  /* both operands must be MATHYPRE wrapping HYPRE_PARCSR objects */
  PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
  PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
  if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
  hA = (Mat_HYPRE*)A->data;
  hB = (Mat_HYPRE*)B->data;
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
  MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);
  MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product;  /* save it from MatHeaderReplace() */
  C->product = NULL;
  MatHeaderReplace(C,&D);
  C->product = product;
  /* MatHeaderReplace() overwrote the ops table; restore the callbacks needed for reuse */
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  return(0);
}

/* Numeric phase of D = A^T*B*C for AIJ matrices, computed through hypre's RAP
   with R = A and P = C */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
{
  Mat                E;
  hypre_ParCSRMatrix *hA,*hB,*hC,*hABC = NULL;
  PetscErrorCode     ierr;

  /* wrap the three AIJ matrices as hypre ParCSR objects */
  MatAIJGetParCSR_Private(A,&hA);
  MatAIJGetParCSR_Private(B,&hB);
  MatAIJGetParCSR_Private(C,&hC);
  MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);
  MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);
  MatHeaderMerge(D,&E);
  /* release the wrappers (A, B and C keep their data) */
  MatAIJRestoreParCSR_Private(A,&hA);
  MatAIJRestoreParCSR_Private(B,&hB);
  MatAIJRestoreParCSR_Private(C,&hC);
  return(0);
}

/* Symbolic phase of D = A^T*B*C with hypre: no symbolic work, just set the output type */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat D)
{

  MatSetType(D,MATAIJ);
  return(0);
}

1074: /* ---------------------------------------------------- */
/* Symbolic phase for MatProduct C = A*B when A is MATHYPRE: only install the numeric callback */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  C->ops->productnumeric = MatProductNumeric_AB;
  return(0);
}

1082: static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1083: {
1085:   Mat_Product    *product = C->product;
1086:   PetscBool      Ahypre;

1089:   PetscObjectTypeCompare((PetscObject)product->A,MATHYPRE,&Ahypre);
1090:   if (Ahypre) { /* A is a Hypre matrix */
1091:     MatSetType(C,MATHYPRE);
1092:     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1093:     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1094:     return(0);
1095:   }
1096:   return(0);
1097: }

/* Symbolic phase for MatProduct C = P^T*A*P with hypre: only install the numeric callback */
static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  C->ops->productnumeric = MatProductNumeric_PtAP;
  return(0);
}

/* Configure the MatProduct C = P^T*A*P; when A is MATHYPRE the result is MATHYPRE,
   otherwise (A is AIJ, P is MATHYPRE) a runtime option selects the output format */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product    *product = C->product;
  PetscBool      flg;
  PetscInt       type = 0;                                      /* default output type: "aij" */
  const char     *outTypes[4] = {"aij","seqaij","mpiaij","hypre"};
  PetscInt       ntype = 4;
  Mat            A = product->A;
  PetscBool      Ahypre;

  PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&Ahypre);
  if (Ahypre) { /* A is a Hypre matrix */
    MatSetType(C,MATHYPRE);
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    return(0);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) { /* called through the old MatPtAP() API: use its option name */
    PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatPtAP_HYPRE","Mat");
    PetscOptionsEList("-matptap_hypre_outtype","MatPtAP outtype","MatPtAP outtype",outTypes,ntype,outTypes[type],&type,&flg);
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C),((PetscObject)C)->prefix,"MatProduct_PtAP_HYPRE","Mat");
    PetscOptionsEList("-matproduct_ptap_hypre_outtype","MatProduct_PtAP outtype","MatProduct_PtAP",outTypes,ntype,outTypes[type],&type,&flg);
    PetscOptionsEnd();
  }

  if (type == 0 || type == 1 || type == 2) {      /* "aij", "seqaij", "mpiaij" */
    MatSetType(C,MATAIJ);
  } else if (type == 3) {                         /* "hypre" */
    MatSetType(C,MATHYPRE);
  } else SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  return(0);
}

1148: static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1149: {
1151:   Mat_Product    *product = C->product;

1154:   switch (product->type) {
1155:   case MATPRODUCT_AB:
1156:     MatProductSetFromOptions_HYPRE_AB(C);
1157:     break;
1158:   case MATPRODUCT_PtAP:
1159:     MatProductSetFromOptions_HYPRE_PtAP(C);
1160:     break;
1161:   default:
1162:     break;
1163:   }
1164:   return(0);
1165: }

1167: /* -------------------------------------------------------- */

/* y = A^T * x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{

  MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);
  return(0);
}

/* y = A * x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{

  MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);
  return(0);
}

/* z = y + A * x */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{

  if (y != z) { /* the kernel accumulates into its output, so seed z with y first */
    VecCopy(y,z);
  }
  MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);
  return(0);
}

/* z = y + A^T * x */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{

  if (y != z) { /* the kernel accumulates into its output, so seed z with y first */
    VecCopy(y,z);
  }
  MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);
  return(0);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscErrorCode     ierr;

  /* Temporarily wrap the PETSc vectors in the matrix's cached hypre IJ vectors.
     hA->x is sized by the column layout and hA->b by the row layout, so their
     roles swap in the transpose case. When b == 0 the output vector's old
     contents are irrelevant, so a write-only wrap avoids copying them in. */
  if (trans) {
    VecHYPRE_IJVectorPushVecRead(hA->b,x);
    if (b != 0.0) { VecHYPRE_IJVectorPushVec(hA->x,y); }
    else { VecHYPRE_IJVectorPushVecWrite(hA->x,y); }
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b->ij,(void**)&hx));
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x->ij,(void**)&hy));
  } else {
    VecHYPRE_IJVectorPushVecRead(hA->x,x);
    if (b != 0.0) { VecHYPRE_IJVectorPushVec(hA->b,y); }
    else { VecHYPRE_IJVectorPushVecWrite(hA->b,y); }
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x->ij,(void**)&hx));
    PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b->ij,(void**)&hy));
  }
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  if (trans) {
    PetscStackCallStandard(hypre_ParCSRMatrixMatvecT,(a,parcsr,hx,b,hy));
  } else {
    PetscStackCallStandard(hypre_ParCSRMatrixMatvec,(a,parcsr,hx,b,hy));
  }
  /* restore both wrappers regardless of trans: both were pushed above */
  VecHYPRE_IJVectorPopVec(hA->x);
  VecHYPRE_IJVectorPopVec(hA->b);
  return(0);
}

/* Destroy a MATHYPRE matrix: release the cached hypre vectors, the IJ matrix,
   the duplicated communicator, the stash, the scratch array, and all composed methods */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;

  VecHYPRE_IJVectorDestroy(&hA->x);
  VecHYPRE_IJVectorDestroy(&hA->b);
  if (hA->ij) {
    /* if the inner ParCSR is not owned by us, detach it so hypre does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) {MPI_Comm_free(&hA->comm);}

  MatStashDestroy_Private(&A->stash);

  PetscFree(hA->array); /* scratch space used by MatGetArray_HYPRE() */

  /* remove the composed methods installed at creation time */
  PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_seqaij_hypre_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_mpiaij_hypre_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);
  PetscFree(A->data);
  return(0);
}

/* Default setup: preallocate with default row sizes (see MatHYPRESetPreallocation_HYPRE) */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{

  MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);
  return(0);
}

//TODO FIX hypre_CSRMatrixMatvecOutOfPlace
#if defined(PETSC_HAVE_HYPRE_DEVICE)
/* Migrate the assembled hypre data (and the cached vectors) between host and
   device memory according to the bind flag */
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE            *hA = (Mat_HYPRE*)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;
  PetscErrorCode       ierr;

  A->boundtocpu = bind;
  /* only migrate data that is already assembled and not already in the target space */
  if (hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
    PetscStackCallStandard(hypre_ParCSRMatrixMigrate,(parcsr, hmem));
  }
  if (hA->x) { VecHYPRE_IJBindToCPU(hA->x,bind); }
  if (hA->b) { VecHYPRE_IJBindToCPU(hA->b,bind); }
  return(0);
}
#endif

/* Finish assembly: drain the stash of off-process entries inserted with
   MatSetValues(), call hypre's assembly, then recreate the auxiliary matrix
   that hypre destroys during assembly so the matrix can keep receiving values */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  PetscMPIInt        n;
  PetscInt           i,j,rstart,ncols,flg;
  PetscInt           *row,*col;
  PetscScalar        *val;
  PetscErrorCode     ierr;

  if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* receive stashed entries from other processes until the scatter is exhausted */
    while (1) {
      MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);

        i = j;
      }
    }
    MatStashScatterEnd_Private(&A->stash);
  }

  PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
      PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
#else
      PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize_v2,(aux_matrix,HYPRE_MEMORY_HOST));
#endif
    }
  }
  {
    hypre_ParCSRMatrix *parcsr;

    /* make sure the communication package exists before any matvec */
    PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscStackCallStandard(hypre_MatvecCommPkgCreate,(parcsr));
  }
  /* lazily create the hypre work vectors used by the mult kernels */
  if (!hA->x) { VecHYPRE_IJVectorCreate(A->cmap,&hA->x); }
  if (!hA->b) { VecHYPRE_IJVectorCreate(A->rmap,&hA->b); }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  MatBindToCPU_HYPRE(A,A->boundtocpu);
#endif
  return(0);
}

1372: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1373: {
1374:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1375:   PetscErrorCode     ierr;

1378:   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

1380:   if (hA->size >= size) {
1381:     *array = hA->array;
1382:   } else {
1383:     PetscFree(hA->array);
1384:     hA->size = size;
1385:     PetscMalloc(hA->size,&hA->array);
1386:     *array = hA->array;
1387:   }

1389:   hA->available = PETSC_FALSE;
1390:   return(0);
1391: }

1393: static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1394: {
1395:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;

1398:   *array = NULL;
1399:   hA->available = PETSC_TRUE;
1400:   return(0);
1401: }

/* Insert or add a logically dense (nr x nc) block of values into the hypre IJ
   matrix. Negative row/column indices are skipped (PETSc convention); with
   INSERT_VALUES, off-process rows are stashed until MatAssemblyEnd_HYPRE(). */
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  PetscScalar    *vals = (PetscScalar *)v;
  HYPRE_Complex  *sscr;
  PetscInt       *cscr[2];
  PetscInt       i,nzc;
  void           *array = NULL;

  /* scratch layout: [compressed cols | original col positions | converted values] */
  MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);
  cscr[0] = (PetscInt*)array;
  cscr[1] = ((PetscInt*)array)+nc;
  sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
  /* compress out negative column indices, remembering each kept column's
     original position so the matching value can be picked from vals */
  for (i=0,nzc=0;i<nc;i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc  ] = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* all columns were negative: nothing to insert */
    MatRestoreArray_HYPRE(A,&array);
    return(0);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
    PetscStackCallStandard(hypre_ParCSRMatrixMigrate,(parcsr, HYPRE_MEMORY_HOST));
  }
#endif

  if (ins == ADD_VALUES) {
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        /* convert the kept values of this row to HYPRE_Complex */
        for (j=0;j<nzc;j++) { PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
        /* NOTE(review): the (HYPRE_BigInt*) casts assume HYPRE_BigInt and PetscInt
           have the same size in this configuration -- verify against petschypre.h */
        PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc; /* advance to the next row of the dense input block */
    }
  } else { /* INSERT_VALUES */
    PetscInt rst,ren;

    MatGetOwnershipRange(A,&rst,&ren);
    for (i=0;i<nr;i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
        for (j=0;j<nzc;j++) { PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) { MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE); }
        /* local values */
        else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
      }
      vals += nc;
    }
  }

  MatRestoreArray_HYPRE(A,&array);
  return(0);
}

/* Type-specific implementation of MatHYPRESetPreallocation(): creates the
   HYPRE_IJMatrix if needed, sets diagonal/off-diagonal row sizes, and
   (re)initializes the IJ matrix so values can be inserted */
static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;
  HYPRE_Int      *hdnnz,*honnz;
  PetscInt       i,rs,re,cs,ce,bs;
  PetscMPIInt    size;

  MatGetBlockSize(A,&bs);
  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);
  rs   = A->rmap->rstart;
  re   = A->rmap->rend;
  cs   = A->cmap->rstart;
  ce   = A->cmap->rend;
  if (!hA->ij) {
    /* first call: create the IJ matrix over the local ranges (inclusive bounds) */
    PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
    PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  } else {
    /* subsequent calls: the existing IJ matrix must match the PETSc layout */
    HYPRE_BigInt hrs,hre,hcs,hce;
    PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
    if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
    if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
  }
  /* default preallocation: 10 nonzeros per row, scaled by the block size */
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

  if (!dnnz) {
    PetscMalloc1(A->rmap->n,&hdnnz);
    for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
  } else {
    /* NOTE(review): this cast assumes sizeof(HYPRE_Int) == sizeof(PetscInt)
       in this configuration -- verify against petschypre.h */
    hdnnz = (HYPRE_Int*)dnnz;
  }
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      PetscMalloc1(A->rmap->n,&honnz);
      for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int*)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
    aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    /* sequential case: no off-diagonal block */
    honnz = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
#else
  PetscStackCallStandard(HYPRE_IJMatrixInitialize_v2,(hA->ij,HYPRE_MEMORY_HOST));
#endif
  /* free the per-row arrays only if we allocated them above */
  if (!dnnz) {
    PetscFree(hdnnz);
  }
  if (!onnz && honnz) {
    PetscFree(honnz);
  }
  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  return(0);
}

1552: /*@C
1553:    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1555:    Collective on Mat

1557:    Input Parameters:
1558: +  A - the matrix
1559: .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1560:           (same value is used for all local rows)
1561: .  dnnz - array containing the number of nonzeros in the various rows of the
1562:           DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1564:           The size of this array is equal to the number of local rows, i.e 'm'.
1565:           For matrices that will be factored, you must leave room for (and set)
1566:           the diagonal entry even if it is zero.
1567: .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1568:           submatrix (same value is used for all local rows).
1569: -  onnz - array containing the number of nonzeros in the various rows of the
1570:           OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1572:           structure. The size of this array is equal to the number
1573:           of local rows, i.e 'm'.

1575:    Notes:
1576:     If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

1578:    Level: intermediate

1580: .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1581: @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{

  /* dispatch to the type-specific implementation composed on A; a no-op for
     matrix types that do not compose "MatHYPRESetPreallocation_C" */
  PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));
  return(0);
}

1593: /*
1594:    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

1596:    Collective

1598:    Input Parameters:
1599: +  parcsr   - the pointer to the hypre_ParCSRMatrix
1600: .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1601: -  copymode - PETSc copying options

1603:    Output Parameter:
1604: .  A  - the matrix

1606:    Level: intermediate

1608: .seealso: MatHYPRE, PetscCopyMode
1609: */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat            T;
  Mat_HYPRE      *hA;
  MPI_Comm       comm;
  PetscInt       rstart,rend,cstart,cend,M,N;
  PetscBool      isseqaij,isseqaijmkl,ismpiaij,isaij,ishyp,isis;

  comm  = hypre_ParCSRMatrixComm(parcsr);
  /* decode the requested output type; any AIJ flavor is folded into 'isaij' below */
  PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);
  PetscStrcmp(mtype,MATSEQAIJMKL,&isseqaijmkl);
  PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);
  PetscStrcmp(mtype,MATAIJ,&isaij);
  PetscStrcmp(mtype,MATHYPRE,&ishyp);
  PetscStrcmp(mtype,MATIS,&isis);
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  if (!isaij && !ishyp && !isis) SETERRQ7(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATSEQAIJMKL,MATMPIAIJ,MATIS,MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention: ownership ranges are half-open [rstart,rend),
     while hypre's last indices above are inclusive */
  rend++;
  cend++;
  rend = PetscMin(rend,M);
  cend = PetscMin(cend,N);

  /* create PETSc matrix with MatHYPRE */
  MatCreate(comm,&T);
  MatSetSizes(T,rend-rstart,cend-cstart,M,N);
  MatSetType(T,MATHYPRE);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));

// TODO DEV
  /* create new ParCSR object if needed */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2,18,0)
    /* old hypre: the clone does not carry the values, so copy them by hand */
    hypre_CSRMatrix    *hdiag,*hoffd,*ndiag,*noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr,0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));
    PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr,1);
#endif
    parcsr     = new_parcsr;
    copymode   = PETSC_OWN_POINTER; /* we own the clone, so from here on treat it as ours */
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      MPI_Comm_rank(comm,&myid);
    }
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      MatGetLayouts(T,NULL,&map);
      PetscLayoutSetUp(map);
      /* point hypre at the PETSc layout range; hypre does not own this array
         (OwnsColStarts is false), so it will not try to free it */
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      MatGetLayouts(T,&map,NULL);
      PetscLayoutSetUp(map);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    MatSetOption(*A,MAT_SORTED_FULL,PETSC_TRUE);
    MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);
      MatDestroy(&T);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);
      *A   = T;
    }
  } else if (isis) {
    MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    MatDestroy(&T);
  }
  return(0);
}

1735: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1736: {
1737:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1738:   HYPRE_Int type;

1741:   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1742:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1743:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1744:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1745:   return(0);
1746: }

1748: /*
1749:    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1751:    Not collective

   Input Parameter:
.  A  - the MATHYPRE object

1756:    Output Parameter:
1757: .  parcsr  - the pointer to the hypre_ParCSRMatrix

1759:    Level: intermediate

.seealso: MATHYPRE, PetscCopyMode
1762: */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{

  /* Dispatch to the type-specific implementation composed under
     "MatHYPREGetParCSR_C"; PetscUseMethod errors if the matrix type
     does not provide it */
  PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));
  return(0);
}

1774: static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1775: {
1776:   hypre_ParCSRMatrix *parcsr;
1777:   hypre_CSRMatrix    *ha;
1778:   PetscInt           rst;
1779:   PetscErrorCode     ierr;

1782:   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1783:   MatGetOwnershipRange(A,&rst,NULL);
1784:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1785:   if (missing) *missing = PETSC_FALSE;
1786:   if (dd) *dd = -1;
1787:   ha = hypre_ParCSRMatrixDiag(parcsr);
1788:   if (ha) {
1789:     PetscInt  size,i;
1790:     HYPRE_Int *ii,*jj;

1792:     size = hypre_CSRMatrixNumRows(ha);
1793:     ii   = hypre_CSRMatrixI(ha);
1794:     jj   = hypre_CSRMatrixJ(ha);
1795:     for (i = 0; i < size; i++) {
1796:       PetscInt  j;
1797:       PetscBool found = PETSC_FALSE;

1799:       for (j = ii[i]; j < ii[i+1] && !found; j++)
1800:         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

1802:       if (!found) {
1803:         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1804:         if (missing) *missing = PETSC_TRUE;
1805:         if (dd) *dd = i+rst;
1806:         return(0);
1807:       }
1808:     }
1809:     if (!size) {
1810:       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1811:       if (missing) *missing = PETSC_TRUE;
1812:       if (dd) *dd = rst;
1813:     }
1814:   } else {
1815:     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1816:     if (missing) *missing = PETSC_TRUE;
1817:     if (dd) *dd = rst;
1818:   }
1819:   return(0);
1820: }

static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2,19,0)
  hypre_CSRMatrix    *ha;
#endif
  PetscErrorCode     ierr;
  HYPRE_Complex      hs;

  /* convert the PETSc scalar to hypre's scalar type (handles complex builds) */
  PetscHYPREScalarCast(s,&hs);
  MatHYPREGetParCSR_HYPRE(A,&parcsr);
#if PETSC_PKG_HYPRE_VERSION_GE(2,19,0)
  /* hypre >= 2.19 provides a native scaling routine */
  PetscStackCallStandard(hypre_ParCSRMatrixScale,(parcsr,hs));
#else  /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    /* ii[size] is the total number of stored values in this CSR block */
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  /* offdiagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt      size,i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
#endif
  return(0);
}

1864: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1865: {
1866:   hypre_ParCSRMatrix *parcsr;
1867:   HYPRE_Int          *lrows;
1868:   PetscInt           rst,ren,i;
1869:   PetscErrorCode     ierr;

1872:   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1873:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1874:   PetscMalloc1(numRows,&lrows);
1875:   MatGetOwnershipRange(A,&rst,&ren);
1876:   for (i=0;i<numRows;i++) {
1877:     if (rows[i] < rst || rows[i] >= ren)
1878:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1879:     lrows[i] = rows[i] - rst;
1880:   }
1881:   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1882:   PetscFree(lrows);
1883:   return(0);
1884: }

1886: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1887: {
1888:   PetscErrorCode      ierr;

1891:   if (ha) {
1892:     HYPRE_Int     *ii, size;
1893:     HYPRE_Complex *a;

1895:     size = hypre_CSRMatrixNumRows(ha);
1896:     a    = hypre_CSRMatrixData(ha);
1897:     ii   = hypre_CSRMatrixI(ha);

1899:     if (a) {PetscArrayzero(a,ii[size]);}
1900:   }
1901:   return(0);
1902: }

1904: PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1905: {
1906:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

1909:   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
1910:     PetscStackCallStandard(HYPRE_IJMatrixSetConstantValues,(hA->ij,0.0));
1911:   } else {
1912:     hypre_ParCSRMatrix *parcsr;
1913:     PetscErrorCode     ierr;

1915:     MatHYPREGetParCSR_HYPRE(A,&parcsr);
1916:     MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));
1917:     MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));
1918:   }
1919:   return(0);
1920: }

1922: static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1923: {
1924:   PetscInt        ii;
1925:   HYPRE_Int       *i, *j;
1926:   HYPRE_Complex   *a;

1929:   if (!hA) return(0);

1931:   i = hypre_CSRMatrixI(hA);
1932:   j = hypre_CSRMatrixJ(hA);
1933:   a = hypre_CSRMatrixData(hA);

1935:   for (ii = 0; ii < N; ii++) {
1936:     HYPRE_Int jj, ibeg, iend, irow;

1938:     irow = rows[ii];
1939:     ibeg = i[irow];
1940:     iend = i[irow+1];
1941:     for (jj = ibeg; jj < iend; jj++)
1942:       if (j[jj] == irow) a[jj] = diag;
1943:       else a[jj] = 0.0;
1944:    }
1945:    return(0);
1946: }

1948: static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1949: {
1950:   hypre_ParCSRMatrix  *parcsr;
1951:   PetscInt            *lrows,len;
1952:   HYPRE_Complex       hdiag;
1953:   PetscErrorCode      ierr;

1956:   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
1957:   PetscHYPREScalarCast(diag,&hdiag);
1958:   /* retrieve the internal matrix */
1959:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1960:   /* get locally owned rows */
1961:   MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);
1962:   /* zero diagonal part */
1963:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);
1964:   /* zero off-diagonal part */
1965:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);

1967:   PetscFree(lrows);
1968:   return(0);
1969: }

static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
{

  /* nothing to communicate if the user guaranteed no off-process entries were set */
  if (mat->nooffprocentries) return(0);

  /* start shipping stashed off-process values to their owning ranks
     (presumably completed by the assemblyend op, MatAssemblyEnd_HYPRE) */
  MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);
  return(0);
}

1982: static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1983: {
1984:   hypre_ParCSRMatrix  *parcsr;
1985:   HYPRE_Int           hnz;
1986:   PetscErrorCode      ierr;

1989:   /* retrieve the internal matrix */
1990:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1991:   /* call HYPRE API */
1992:   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1993:   if (nz) *nz = (PetscInt)hnz;
1994:   return(0);
1995: }

1997: static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1998: {
1999:   hypre_ParCSRMatrix  *parcsr;
2000:   HYPRE_Int           hnz;
2001:   PetscErrorCode      ierr;

2004:   /* retrieve the internal matrix */
2005:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
2006:   /* call HYPRE API */
2007:   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2008:   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
2009:   return(0);
2010: }

2012: static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
2013: {
2014:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
2015:   PetscInt  i;

2018:   if (!m || !n) return(0);
2019:   /* Ignore negative row indices
2020:    * And negative column indices should be automatically ignored in hypre
2021:    * */
2022:   for (i=0; i<m; i++) {
2023:     if (idxm[i] >= 0) {
2024:       HYPRE_Int hn = (HYPRE_Int)n;
2025:       PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
2026:     }
2027:   }
2028:   return(0);
2029: }

2031: static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
2032: {
2033:   Mat_HYPRE *hA = (Mat_HYPRE*)A->data;

2036:   switch (op) {
2037:   case MAT_NO_OFF_PROC_ENTRIES:
2038:     if (flg) {
2039:       PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
2040:     }
2041:     break;
2042:   case MAT_SORTED_FULL:
2043:     hA->sorted_full = flg;
2044:     break;
2045:   default:
2046:     break;
2047:   }
2048:   return(0);
2049: }

static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscErrorCode     ierr;
  PetscViewerFormat  format;

  PetscViewerGetFormat(view,&format);
  /* the info formats print nothing for this type */
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) return(0);
  if (format != PETSC_VIEWER_NATIVE) {
    /* non-native formats: view through a temporary AIJ wrapper that shares
       (PETSC_USE_POINTER) the ParCSR data, so nothing is copied */
    Mat                B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode     (*mview)(Mat,PetscViewer) = NULL;

    MatHYPREGetParCSR_HYPRE(A,&parcsr);
    MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);
    MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);
    if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");
    (*mview)(B,view);
    MatDestroy(&B);
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE*)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);
    if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
    PetscViewerFileGetName(view,&filename);
    /* hypre writes one file per rank, suffixed with the (zero-padded) rank number */
    PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
    MPI_Comm_size(hA->comm,&size);
    if (size > 1) {
      PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);
    } else {
      PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);
    }
  }
  return(0);
}

2091: static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
2092: {
2093:   hypre_ParCSRMatrix *parcsr = NULL;
2094:   PetscErrorCode     ierr;
2095:   PetscCopyMode      cpmode;

2098:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
2099:   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2100:     parcsr = hypre_ParCSRMatrixClone(parcsr,0);
2101:     cpmode = PETSC_OWN_POINTER;
2102:   } else {
2103:     cpmode = PETSC_COPY_VALUES;
2104:   }
2105:   MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);
2106:   return(0);
2107: }

2109: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2110: {
2111:   hypre_ParCSRMatrix *acsr,*bcsr;
2112:   PetscErrorCode     ierr;

2115:   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2116:     MatHYPREGetParCSR_HYPRE(A,&acsr);
2117:     MatHYPREGetParCSR_HYPRE(B,&bcsr);
2118:     PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
2119:     MatSetOption(B,MAT_SORTED_FULL,PETSC_TRUE); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2120:     MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
2121:     MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
2122:   } else {
2123:     MatCopy_Basic(A,B,str);
2124:   }
2125:   return(0);
2126: }

static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  HYPRE_Complex      *data = NULL;
  HYPRE_Int          *diag = NULL;
  PetscInt           i;
  PetscBool          cong;
  PetscErrorCode     ierr;

  MatHasCongruentLayouts(A,&cong);
  if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with same local distributions of rows and columns");
  if (PetscDefined(USE_DEBUG)) {
    /* in debug builds, verify every local row actually stores its diagonal entry */
    PetscBool miss;
    MatMissingDiagonal(A,&miss,NULL);
    if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
  }
  MatHYPREGetParCSR_HYPRE(A,&parcsr);
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
    /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
    VecGetArray(d,(PetscScalar**)&a);
    diag = hypre_CSRMatrixI(dmat);
    data = hypre_CSRMatrixData(dmat);
    /* data[diag[i]] is the first stored value of local row i; this assumes
       hypre keeps the diagonal entry first in each row of the diagonal
       block -- NOTE(review): confirm against the hypre version in use */
    for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
    VecRestoreArray(d,(PetscScalar**)&a);
  }
  return(0);
}

2160: #include <petscblaslapack.h>

2162: static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
2163: {

2167: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2168:   {
2169:     Mat                B;
2170:     hypre_ParCSRMatrix *x,*y,*z;

2172:     MatHYPREGetParCSR(Y,&y);
2173:     MatHYPREGetParCSR(X,&x);
2174:     PetscStackCallStandard(hypre_ParCSRMatrixAdd,(1.0,y,1.0,x,&z));
2175:     MatCreateFromParCSR(z,MATHYPRE,PETSC_OWN_POINTER,&B);
2176:     MatHeaderMerge(Y,&B);
2177:   }
2178: #else
2179:   if (str == SAME_NONZERO_PATTERN) {
2180:     hypre_ParCSRMatrix *x,*y;
2181:     hypre_CSRMatrix    *xloc,*yloc;
2182:     PetscInt           xnnz,ynnz;
2183:     HYPRE_Complex      *xarr,*yarr;
2184:     PetscBLASInt       one=1,bnz;

2186:     MatHYPREGetParCSR(Y,&y);
2187:     MatHYPREGetParCSR(X,&x);

2189:     /* diagonal block */
2190:     xloc = hypre_ParCSRMatrixDiag(x);
2191:     yloc = hypre_ParCSRMatrixDiag(y);
2192:     xnnz = 0;
2193:     ynnz = 0;
2194:     xarr = NULL;
2195:     yarr = NULL;
2196:     if (xloc) {
2197:       xarr = hypre_CSRMatrixData(xloc);
2198:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2199:     }
2200:     if (yloc) {
2201:       yarr = hypre_CSRMatrixData(yloc);
2202:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2203:     }
2204:     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
2205:     PetscBLASIntCast(xnnz,&bnz);
2206:     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));

2208:     /* off-diagonal block */
2209:     xloc = hypre_ParCSRMatrixOffd(x);
2210:     yloc = hypre_ParCSRMatrixOffd(y);
2211:     xnnz = 0;
2212:     ynnz = 0;
2213:     xarr = NULL;
2214:     yarr = NULL;
2215:     if (xloc) {
2216:       xarr = hypre_CSRMatrixData(xloc);
2217:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2218:     }
2219:     if (yloc) {
2220:       yarr = hypre_CSRMatrixData(yloc);
2221:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2222:     }
2223:     if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
2224:     PetscBLASIntCast(xnnz,&bnz);
2225:     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2226:   } else if (str == SUBSET_NONZERO_PATTERN) {
2227:     MatAXPY_Basic(Y,a,X,str);
2228:   } else {
2229:     Mat B;

2231:     MatAXPY_Basic_Preallocate(Y,X,&B);
2232:     MatAXPY_BasicWithPreallocation(B,Y,a,X,str);
2233:     MatHeaderReplace(Y,&B);
2234:   }
2235: #endif
2236:   return(0);
2237: }

2239: /*MC
2240:    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2241:           based on the hypre IJ interface.

2243:    Level: intermediate

2245: .seealso: MatCreate()
2246: M*/

PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;

  /* allocate the type-specific data attached to B->data */
  PetscNewLog(B,&hB);

  hB->inner_free  = PETSC_TRUE;
  hB->available   = PETSC_TRUE;
  hB->sorted_full = PETSC_FALSE; /* no assumption whether column indices are sorted or not */
  hB->size        = 0;
  hB->array       = NULL;

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* install the MATHYPRE method table */
  PetscMemzero(B->ops,sizeof(struct _MatOps));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  B->ops->bindtocpu             = MatBindToCPU_HYPRE;
  B->boundtocpu                 = PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);

  /* duplicate the communicator so hypre gets one for its exclusive use */
  MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);
  PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);
  /* register the conversion/product/accessor methods queried elsewhere by name */
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);
  PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_seqaij_hypre_C",MatProductSetFromOptions_HYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatProductSetFromOptions_mpiaij_hypre_C",MatProductSetFromOptions_HYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
#if defined(HYPRE_USING_HIP)
  PetscHIPInitializeCheck();
  MatSetVecType(B,VECHIP);
#endif
#if defined(HYPRE_USING_CUDA)
  PetscCUDAInitializeCheck();
  MatSetVecType(B,VECCUDA);
#endif
#endif
  return(0);
}

static PetscErrorCode hypre_array_destroy(void *ptr)
{
   /* generic destructor for host arrays allocated by hypre (frees with hypre's
      own allocator rather than PetscFree) */
   hypre_TFree(ptr,HYPRE_MEMORY_HOST);
   return(0);
}