Actual source code: mhypre.c

petsc-3.9.4 2018-09-11
Report Typos and Errors

  2: /*
  3:     Creates hypre ijmatrix from PETSc matrix
  4: */

  6:  #include <petscmathypre.h>
  7:  #include <petsc/private/matimpl.h>
  8:  #include <../src/mat/impls/hypre/mhypre.h>
  9:  #include <../src/mat/impls/aij/mpi/mpiaij.h>
 10:  #include <../src/vec/vec/impls/hypre/vhyp.h>
 11: #include <HYPRE.h>
 12: #include <HYPRE_utilities.h>
 13: #include <_hypre_parcsr_ls.h>
 14: #include <_hypre_sstruct_ls.h>

 16: static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
 17: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
 18: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
 19: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
 20: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
 21: static PetscErrorCode hypre_array_destroy(void*);
 22: PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);

 24: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 25: {
 27:   PetscInt       i,n_d,n_o;
 28:   const PetscInt *ia_d,*ia_o;
 29:   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
 30:   PetscInt       *nnz_d=NULL,*nnz_o=NULL;

 33:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 34:     MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
 35:     if (done_d) {
 36:       PetscMalloc1(n_d,&nnz_d);
 37:       for (i=0; i<n_d; i++) {
 38:         nnz_d[i] = ia_d[i+1] - ia_d[i];
 39:       }
 40:     }
 41:     MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
 42:   }
 43:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 44:     MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 45:     if (done_o) {
 46:       PetscMalloc1(n_o,&nnz_o);
 47:       for (i=0; i<n_o; i++) {
 48:         nnz_o[i] = ia_o[i+1] - ia_o[i];
 49:       }
 50:     }
 51:     MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 52:   }
 53:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 54:     if (!done_o) { /* only diagonal part */
 55:       PetscMalloc1(n_d,&nnz_o);
 56:       for (i=0; i<n_d; i++) {
 57:         nnz_o[i] = 0;
 58:       }
 59:     }
 60:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
 61:     PetscFree(nnz_d);
 62:     PetscFree(nnz_o);
 63:   }
 64:   return(0);
 65: }

 67: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
 68: {
 70:   PetscInt       rstart,rend,cstart,cend;

 73:   PetscLayoutSetUp(A->rmap);
 74:   PetscLayoutSetUp(A->cmap);
 75:   rstart = A->rmap->rstart;
 76:   rend   = A->rmap->rend;
 77:   cstart = A->cmap->rstart;
 78:   cend   = A->cmap->rend;
 79:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
 80:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
 81:   {
 82:     PetscBool      same;
 83:     Mat            A_d,A_o;
 84:     const PetscInt *colmap;
 85:     PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);
 86:     if (same) {
 87:       MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
 88:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
 89:       return(0);
 90:     }
 91:     PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
 92:     if (same) {
 93:       MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
 94:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
 95:       return(0);
 96:     }
 97:     PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);
 98:     if (same) {
 99:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
100:       return(0);
101:     }
102:     PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
103:     if (same) {
104:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
105:       return(0);
106:     }
107:   }
108:   return(0);
109: }

111: static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
112: {
113:   PetscErrorCode    ierr;
114:   PetscInt          i,rstart,rend,ncols,nr,nc;
115:   const PetscScalar *values;
116:   const PetscInt    *cols;
117:   PetscBool         flg;

120:   PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
121:   MatGetSize(A,&nr,&nc);
122:   if (flg && nr == nc) {
123:     MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
124:     return(0);
125:   }
126:   PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
127:   if (flg) {
128:     MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
129:     return(0);
130:   }

132:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
133:   MatGetOwnershipRange(A,&rstart,&rend);
134:   for (i=rstart; i<rend; i++) {
135:     MatGetRow(A,i,&ncols,&cols,&values);
136:     if (ncols) {
137:       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
138:     }
139:     MatRestoreRow(A,i,&ncols,&cols,&values);
140:   }
141:   return(0);
142: }

/*
    Fast copy of a SeqAIJ matrix into the (preallocated) hypre IJ matrix:
    writes straight into hypre's diagonal CSR arrays instead of going
    through HYPRE_IJMatrixSetValues.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  /* NOTE(review): these memcpys assume sizeof(HYPRE_Int)==sizeof(PetscInt) and
     that hypre's data type matches PetscScalar — confirm for this configuration */
  PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
  PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
  PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));

  /* tell hypre the CSR arrays are already filled so assembly does not
     rebuild them from the auxiliary (IJ) representation */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  return(0);
}

/*
    Fast copy of a (square) MPIAIJ matrix into the hypre IJ matrix: fills
    hypre's diagonal and off-diagonal CSR blocks directly, translating
    PETSc's local column indices back to global numbering as required by
    hypre.
*/
static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscErrorCode        ierr;
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ            *pdiag,*poffd;
  PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
  HYPRE_Int             type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag,*hoffd;

  pdiag = (Mat_SeqAIJ*) pA->A->data;
  poffd = (Mat_SeqAIJ*) pA->B->data;
  /* cstart is only valid for square MPIAIJ layed out in the usual way */
  MatGetOwnershipRange(A,&cstart,NULL);

  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  /* NOTE(review): assumes sizeof(HYPRE_Int)==sizeof(PetscInt) — confirm */
  PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
  /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
  jj  = (PetscInt*)hdiag->j;
  pjj = (PetscInt*)pdiag->j;
  for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
  PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
  /* diag and offd blocks share the local row count, so pA->A->rmap->n is used for both */
  PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
  /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
     If we hacked a hypre a bit more we might be able to avoid this step */
  jj  = (PetscInt*) hoffd->j;
  pjj = (PetscInt*) poffd->j;
  for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
  PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));

  /* CSR arrays are filled: tell hypre not to rebuild them from the IJ representation */
  aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  return(0);
}

/*
    Converts a MATHYPRE matrix A to MATIS. The local MATIS matrix is built
    by merging hypre's diagonal and off-diagonal CSR blocks row by row; its
    column space is the local diagonal columns followed by the off-diagonal
    (ghost) columns.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
{
  Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  hypre_ParCSRMatrix     *hA;
  hypre_CSRMatrix        *hdiag,*hoffd;
  MPI_Comm               comm;
  PetscScalar            *hdd,*hod,*aa,*data;
  HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
  PetscInt               *ii,*jj,*iptr,*jptr;
  PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
  HYPRE_Int              type;
  PetscErrorCode         ierr;

  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
  /* pull sizes, starting indices and raw CSR arrays out of the ParCSR matrix */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz  += hypre_CSRMatrixNumNonzeros(hoffd);  /* nnz = total local nonzeros (diag + offd) */
  hoi   = hypre_CSRMatrixI(hoffd);
  hoj   = hypre_CSRMatrixJ(hoffd);
  hod   = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    ISCreateStride(comm,dr,str,1,&is);
    ISLocalToGlobalMappingCreateIS(is,&rl2g);
    ISDestroy(&is);
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    /* local columns: first the dc diagonal columns, then the oc ghost columns */
    PetscMalloc1(dc+oc,&aux);
    for (i=0; i<dc; i++) aux[i] = i+stc;
    for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
    ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
    ISLocalToGlobalMappingCreateIS(is,&cl2g);
    ISDestroy(&is);
    /* create MATIS object */
    MatCreate(comm,B);
    MatSetSizes(*B,dr,dc,M,N);
    MatSetType(*B,MATIS);
    MatSetLocalToGlobalMapping(*B,rl2g,cl2g);
    ISLocalToGlobalMappingDestroy(&rl2g);
    ISLocalToGlobalMappingDestroy(&cl2g);

    /* allocate CSR for local matrix */
    PetscMalloc1(dr+1,&iptr);
    PetscMalloc1(nnz,&jptr);
    PetscMalloc1(nnz,&data);
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write in place into the existing local matrix of the MATIS */
    MatISGetLocalMat(*B,&lA);
    MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);
    if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
    if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
    MatSeqAIJGetArray(lA,&data);
  }
  /* merge local matrices: walk diag and offd CSR arrays in lockstep,
     emitting each row's diag entries (local column index) followed by its
     offd entries (shifted by dc), then sort the row by column index */
  ii   = iptr;
  jj   = jptr;
  aa   = data;
  *ii  = *(hdi++) + *(hoi++);
  for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
    PetscScalar *aold = aa;
    PetscInt    *jold = jj,nc = jd+jo;
    for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
    for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);
  }
  /* pad the row pointer for any trailing empty rows */
  for (; cum<dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ* a;

    MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);
    MatISSetLocalMat(*B,lA);
    /* hack SeqAIJ: transfer ownership of iptr/jptr/data to the local matrix */
    a          = (Mat_SeqAIJ*)(lA->data);
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    MatDestroy(&lA);
  }
  MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,B);
  }
  return(0);
}

323: PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
324: {
325:   Mat_HYPRE      *hB;
326:   MPI_Comm       comm = PetscObjectComm((PetscObject)A);

330:   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
331:   if (reuse == MAT_REUSE_MATRIX) {
332:     /* always destroy the old matrix and create a new memory;
333:        hope this does not churn the memory too much. The problem
334:        is I do not know if it is possible to put the matrix back to
335:        its initial state so that we can directly copy the values
336:        the second time through. */
337:     hB = (Mat_HYPRE*)((*B)->data);
338:     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
339:   } else {
340:     MatCreate(comm,B);
341:     MatSetType(*B,MATHYPRE);
342:     MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
343:     hB   = (Mat_HYPRE*)((*B)->data);
344:   }
345:   MatHYPRE_CreateFromMat(A,hB);
346:   MatHYPRE_IJMatrixCopy(A,hB->ij);
347:   (*B)->preallocated = PETSC_TRUE;
348:   MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
349:   MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
350:   return(0);
351: }

/*
    Converts a MATHYPRE matrix A to (MPI/Seq)AIJ. For MAT_INPLACE_MATRIX the
    hypre CSR arrays themselves are handed to the new AIJ matrix; their
    eventual destruction with hypre_TFree is arranged through PetscContainer
    objects composed on *B.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag,*hoffd;
  MPI_Comm           comm;
  PetscScalar        *da,*oa,*aptr;
  PetscInt           *dii,*djj,*oii,*ojj,*iptr;
  PetscInt           i,dnnz,onnz,m,n;
  HYPRE_Int          type;
  PetscMPIInt        size;
  PetscErrorCode     ierr;

  comm = PetscObjectComm((PetscObject)A);
  PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
  if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij,isseqaij;
    PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);
    PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);
    if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
  }
  MPI_Comm_size(comm,&size);

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  /* obtain storage for the diagonal block: fresh arrays, the reused
     matrix's arrays, or (in-place) hypre's own arrays */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscMalloc1(m+1,&dii);
    PetscMalloc1(dnnz,&djj);
    PetscMalloc1(dnnz,&da);
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

      /* NOTE(review): MatGetRowIJ has no matching MatRestoreRowIJ on this path — confirm intended */
      MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
      MatSeqAIJGetArray(b->A,&da);
    } else {
      MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
      if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
      if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
      MatSeqAIJGetArray(*B,&da);
    }
  } else { /* MAT_INPLACE_MATRIX */
    dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
    djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
    da  = hypre_CSRMatrixData(hdiag);
  }
  /* copy the diagonal CSR (a self-copy in the in-place case) and sort each
     row by column index, permuting the values accordingly */
  PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
  PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
  PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
  iptr = djj;
  aptr = da;
  for (i=0; i<m; i++) {
    PetscInt nc = dii[i+1]-dii[i];
    PetscSortIntWithScalarArray(nc,iptr,aptr);
    iptr += nc;
    aptr += nc;
  }
  if (size > 1) {
    HYPRE_Int *offdj,*coffd;

    /* parallel case: same three-way storage choice for the off-diagonal block */
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscMalloc1(m+1,&oii);
      PetscMalloc1(onnz,&ojj);
      PetscMalloc1(onnz,&oa);
    } else if (reuse == MAT_REUSE_MATRIX) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
      PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
      PetscBool  done;

      MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);
      if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
      if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
      MatSeqAIJGetArray(b->B,&oa);
    } else { /* MAT_INPLACE_MATRIX */
      oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
      ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
      oa  = hypre_CSRMatrixData(hoffd);
    }
    PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
    /* translate hypre's compressed off-diagonal column indices to global
       indices via the ParCSR column map */
    offdj = hypre_CSRMatrixJ(hoffd);
    coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
    for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
    PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
    iptr = ojj;
    aptr = oa;
    for (i=0; i<m; i++) {
       PetscInt nc = oii[i+1]-oii[i];
       PetscSortIntWithScalarArray(nc,iptr,aptr);
       iptr += nc;
       aptr += nc;
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d,*o;

      MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);
      /* hack MPIAIJ: transfer ownership of the arrays to the new matrix */
      b          = (Mat_MPIAIJ*)((*B)->data);
      d          = (Mat_SeqAIJ*)b->A->data;
      o          = (Mat_SeqAIJ*)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);
      /* detach the arrays from hypre so hypre_IJMatrixDestroy (triggered by
         MatHeaderReplace destroying A) does not free them */
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixI(hoffd)    = NULL;
      hypre_CSRMatrixJ(hoffd)    = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      MatHeaderReplace(A,&T);
    }
  } else {
    /* sequential case: no off-diagonal block */
    oii  = NULL;
    ojj  = NULL;
    oa   = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ* b;
      MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);
      /* hack SeqAIJ: transfer ownership of the arrays to the new matrix */
      b          = (Mat_SeqAIJ*)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;
      MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);
      hypre_CSRMatrixI(hdiag)    = NULL;
      hypre_CSRMatrixJ(hdiag)    = NULL;
      hypre_CSRMatrixData(hdiag) = NULL;
      MatHeaderReplace(A,&T);
    }
  }

  /* we have to use hypre_Tfree to free the arrays */
  if (reuse == MAT_INPLACE_MATRIX) {
    void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
    const char *names[6] = {"_hypre_csr_dii",
                            "_hypre_csr_djj",
                            "_hypre_csr_da",
                            "_hypre_csr_oii",
                            "_hypre_csr_ojj",
                            "_hypre_csr_oa"};
    for (i=0; i<6; i++) {
      PetscContainer c;

      PetscContainerCreate(comm,&c);
      PetscContainerSetPointer(c,ptrs[i]);
      PetscContainerSetUserDestroy(c,hypre_array_destroy);
      PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);
      PetscContainerDestroy(&c);
    }
  }
  return(0);
}

522: static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
523: {
524:   hypre_ParCSRMatrix *tA;
525:   hypre_CSRMatrix    *hdiag,*hoffd;
526:   Mat_SeqAIJ         *diag,*offd;
527:   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
528:   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
529:   PetscBool          ismpiaij,isseqaij;
530:   PetscErrorCode     ierr;

533:   PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
534:   PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
535:   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
536:   if (ismpiaij) {
537:     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

539:     diag   = (Mat_SeqAIJ*)a->A->data;
540:     offd   = (Mat_SeqAIJ*)a->B->data;
541:     garray = a->garray;
542:     noffd  = a->B->cmap->N;
543:     dnnz   = diag->nz;
544:     onnz   = offd->nz;
545:   } else {
546:     diag   = (Mat_SeqAIJ*)A->data;
547:     offd   = NULL;
548:     garray = NULL;
549:     noffd  = 0;
550:     dnnz   = diag->nz;
551:     onnz   = 0;
552:   }

554:   /* create a temporary ParCSR */
555:   if (HYPRE_AssumedPartitionCheck()) {
556:     PetscMPIInt myid;

558:     MPI_Comm_rank(comm,&myid);
559:     row_starts = A->rmap->range + myid;
560:     col_starts = A->cmap->range + myid;
561:   } else {
562:     row_starts = A->rmap->range;
563:     col_starts = A->cmap->range;
564:   }
565:   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
566:   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
567:   hypre_ParCSRMatrixSetColStartsOwner(tA,0);

569:   /* set diagonal part */
570:   hdiag = hypre_ParCSRMatrixDiag(tA);
571:   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
572:   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
573:   hypre_CSRMatrixData(hdiag)        = diag->a;
574:   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
575:   hypre_CSRMatrixSetRownnz(hdiag);
576:   hypre_CSRMatrixSetDataOwner(hdiag,0);

578:   /* set offdiagonal part */
579:   hoffd = hypre_ParCSRMatrixOffd(tA);
580:   if (offd) {
581:     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
582:     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
583:     hypre_CSRMatrixData(hoffd)        = offd->a;
584:     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
585:     hypre_CSRMatrixSetRownnz(hoffd);
586:     hypre_CSRMatrixSetDataOwner(hoffd,0);
587:     hypre_ParCSRMatrixSetNumNonzeros(tA);
588:     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
589:   }
590:   *hA = tA;
591:   return(0);
592: }

594: static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
595: {
596:   hypre_CSRMatrix    *hdiag,*hoffd;

599:   hdiag = hypre_ParCSRMatrixDiag(*hA);
600:   hoffd = hypre_ParCSRMatrixOffd(*hA);
601:   /* set pointers to NULL before destroying tA */
602:   hypre_CSRMatrixI(hdiag)           = NULL;
603:   hypre_CSRMatrixJ(hdiag)           = NULL;
604:   hypre_CSRMatrixData(hdiag)        = NULL;
605:   hypre_CSRMatrixI(hoffd)           = NULL;
606:   hypre_CSRMatrixJ(hoffd)           = NULL;
607:   hypre_CSRMatrixData(hoffd)        = NULL;
608:   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
609:   hypre_ParCSRMatrixDestroy(*hA);
610:   *hA = NULL;
611:   return(0);
612: }

614: /* calls RAP from BoomerAMG:
615:    the resulting ParCSR will not own the column and row starts
616:    It looks like we don't need to have the diagonal entries
617:    ordered first in the rows of the diagonal part
618:    for boomerAMGBuildCoarseOperator to work */
619: static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
620: {
622:   HYPRE_Int      P_owns_col_starts,R_owns_row_starts;

625:   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
626:   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
627:   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
628:   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
629:   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
630:   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
631:   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
632:   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
633:   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
634:   return(0);
635: }

637: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
638: {
639:   Mat                B;
640:   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
641:   PetscErrorCode     ierr;

644:   MatAIJGetParCSR_Private(A,&hA);
645:   MatAIJGetParCSR_Private(P,&hP);
646:   MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);
647:   MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);
648:   MatHeaderMerge(C,&B);
649:   MatAIJRestoreParCSR_Private(A,&hA);
650:   MatAIJRestoreParCSR_Private(P,&hP);
651:   return(0);
652: }

654: PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
655: {

659:   MatCreate(PetscObjectComm((PetscObject)A),C);
660:   MatSetType(*C,MATAIJ);
661:   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
662:   return(0);
663: }

665: static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
666: {
667:   Mat                B;
668:   Mat_HYPRE          *hP;
669:   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
670:   HYPRE_Int          type;
671:   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
672:   PetscBool          ishypre;
673:   PetscErrorCode     ierr;

676:   PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
677:   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
678:   hP = (Mat_HYPRE*)P->data;
679:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
680:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
681:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

683:   MatAIJGetParCSR_Private(A,&hA);
684:   MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);
685:   MatAIJRestoreParCSR_Private(A,&hA);

687:   /* create temporary matrix and merge to C */
688:   MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);
689:   MatHeaderMerge(C,&B);
690:   return(0);
691: }

693: static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
694: {

698:   if (scall == MAT_INITIAL_MATRIX) {
699:     const char *deft = MATAIJ;
700:     char       type[256];
701:     PetscBool  flg;

703:     PetscObjectOptionsBegin((PetscObject)A);
704:     PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);
705:     PetscOptionsEnd();
706:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
707:     MatCreate(PetscObjectComm((PetscObject)A),C);
708:     if (flg) {
709:       MatSetType(*C,type);
710:     } else {
711:       MatSetType(*C,deft);
712:     }
713:     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
714:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
715:   }
716:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
717:   (*(*C)->ops->ptapnumeric)(A,P,*C);
718:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
719:   return(0);
720: }

722: static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
723: {
724:   Mat                B;
725:   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
726:   Mat_HYPRE          *hA,*hP;
727:   PetscBool          ishypre;
728:   HYPRE_Int          type;
729:   PetscErrorCode     ierr;

732:   PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
733:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
734:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
735:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
736:   hA = (Mat_HYPRE*)A->data;
737:   hP = (Mat_HYPRE*)P->data;
738:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
739:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
740:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
741:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
742:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
743:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
744:   MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);
745:   MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);
746:   MatHeaderMerge(C,&B);
747:   return(0);
748: }

750: static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
751: {

755:   if (scall == MAT_INITIAL_MATRIX) {
756:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
757:     MatCreate(PetscObjectComm((PetscObject)A),C);
758:     MatSetType(*C,MATHYPRE);
759:     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
760:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
761:   }
762:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
763:   (*(*C)->ops->ptapnumeric)(A,P,*C);
764:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
765:   return(0);
766: }

768: /* calls hypre_ParMatmul
769:    hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
770:    hypre_ParMatrixCreate does not duplicate the communicator
771:    It looks like we don't need to have the diagonal entries
772:    ordered first in the rows of the diagonal part
773:    for boomerAMGBuildCoarseOperator to work */
774: static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
775: {
777:   PetscStackPush("hypre_ParMatmul");
778:   *hAB = hypre_ParMatmul(hA,hB);
779:   PetscStackPop;
780:   return(0);
781: }

783: /* Numeric C = A*B for two AIJ matrices, carried out through hypre:
     A and B are temporarily viewed as ParCSR (no copy), multiplied with
     hypre_ParMatmul, and the product is converted back to AIJ into C. */
     static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
784: {
785:   Mat                D;
786:   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
787:   PetscErrorCode     ierr;

790:   MatAIJGetParCSR_Private(A,&hA);
791:   MatAIJGetParCSR_Private(B,&hB);
792:   MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);
793:   MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D); /* product now owned by D */
794:   MatHeaderMerge(C,&D);
795:   MatAIJRestoreParCSR_Private(A,&hA);
796:   MatAIJRestoreParCSR_Private(B,&hB);
797:   return(0);
798: }

800: /* "Symbolic" phase of the hypre-backed AIJ*AIJ product: only creates the
     (empty) AIJ result and hooks the numeric kernel; all the work happens in
     MatMatMultNumeric_AIJ_AIJ_wHYPRE.  The fill hint is ignored. */
     PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
801: {

805:   MatCreate(PetscObjectComm((PetscObject)A),C);
806:   MatSetType(*C,MATAIJ);
807:   (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
808:   return(0);
809: }

811: /* Numeric C = A*B when both operands are MATHYPRE (HYPRE_PARCSR only).
     The ParCSR product is wrapped in a new MATHYPRE matrix and swapped into C. */
     static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
812: {
813:   Mat                D;
814:   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
815:   Mat_HYPRE          *hA,*hB;
816:   PetscBool          ishypre;
817:   HYPRE_Int          type;
818:   PetscErrorCode     ierr;

821:   PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);
822:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
823:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
824:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
825:   hA = (Mat_HYPRE*)A->data;
826:   hB = (Mat_HYPRE*)B->data;
827:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
828:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
829:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
830:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
831:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
832:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
833:   MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);
834:   MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);
835:   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
836:   MatHeaderReplace(C,&D);
     /* MatHeaderReplace wipes C->ops, so the numeric pointer must be re-installed
        for MAT_REUSE_MATRIX calls to work */
837:   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
838:   return(0);
839: }

841: /* MatMatMult driver for two MATHYPRE matrices.  As with PtAP, hypre has no
     symbolic stage: MAT_INITIAL_MATRIX just creates C and sets the numeric
     kernel, which is then always invoked.  The fill hint is ignored. */
     static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
842: {

846:   if (scall == MAT_INITIAL_MATRIX) {
847:     PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
848:     MatCreate(PetscObjectComm((PetscObject)A),C);
849:     MatSetType(*C,MATHYPRE);
850:     (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
851:     PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
852:   }
853:   PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
854:   (*(*C)->ops->matmultnumeric)(A,B,*C);
855:   PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
856:   return(0);
857: }

859: /* Numeric D = A^T*B*C for three AIJ matrices, computed with hypre's RAP
     kernel on borrowed ParCSR views and converted back to AIJ. */
     PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
860: {
861:   Mat                E;
862:   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
863:   PetscErrorCode     ierr;

866:   MatAIJGetParCSR_Private(A,&hA);
867:   MatAIJGetParCSR_Private(B,&hB);
868:   MatAIJGetParCSR_Private(C,&hC);
869:   MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC); /* hABC = hA^T * hB * hC */
870:   MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);
871:   MatHeaderMerge(D,&E);
872:   MatAIJRestoreParCSR_Private(A,&hA);
873:   MatAIJRestoreParCSR_Private(B,&hB);
874:   MatAIJRestoreParCSR_Private(C,&hC);
875:   return(0);
876: }

878: /* "Symbolic" phase of the hypre-backed A^T*B*C product: only creates the
     empty AIJ result; the numeric routine above does everything.
     The fill hint is ignored. */
     PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
879: {

883:   MatCreate(PetscObjectComm((PetscObject)A),D);
884:   MatSetType(*D,MATAIJ);
885:   return(0);
886: }

888: /* y = A^T x: dispatch to the shared kernel with trans = PETSC_TRUE */
     static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
889: {

893:   MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);
894:   return(0);
895: }

897: /* y = A x: dispatch to the shared kernel with trans = PETSC_FALSE */
     static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
898: {

902:   MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);
903:   return(0);
904: }

906: /* Shared kernel for MatMult/MatMultTranspose.
     The cached hypre work vectors hA->x (column layout) and hA->b (row layout)
     are made to alias the PETSc arrays of x and y by temporarily swapping their
     data pointers (saved in sax/say and restored afterwards), so no copies are
     made.  For the transpose product the roles of the two work vectors are
     exchanged, since then the input lives in the row layout and the output in
     the column layout. */
     static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
907: {
908:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
909:   hypre_ParCSRMatrix *parcsr;
910:   hypre_ParVector    *hx,*hy;
911:   PetscScalar        *ax,*ay,*sax,*say;
912:   PetscErrorCode     ierr;

915:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
916:   PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
917:   PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
     /* cast drops const on the read-only array; the code below only reads ax */
918:   VecGetArrayRead(x,(const PetscScalar**)&ax);
919:   VecGetArray(y,&ay);
920:   if (trans) {
921:     VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
922:     VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
923:     hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx); /* hx = A^T * hy */
924:     VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
925:     VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
926:   } else {
927:     VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
928:     VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
929:     hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy); /* hy = A * hx */
930:     VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
931:     VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
932:   }
933:   VecRestoreArrayRead(x,(const PetscScalar**)&ax);
934:   VecRestoreArray(y,&ay);
935:   return(0);
936: }

938: /* Destructor for MATHYPRE: releases the hypre IJ matrix/vectors, the
     duplicated communicator, the stash, the scratch array, and the composed
     type-specific methods. */
     static PetscErrorCode MatDestroy_HYPRE(Mat A)
939: {
940:   Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;

944:   if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
945:   if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
946:   if (hA->ij) {
     /* when the inner ParCSR is only borrowed (inner_free == PETSC_FALSE),
        detach it first so HYPRE_IJMatrixDestroy does not free it */
947:     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
948:     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
949:   }
950:   if (hA->comm) { MPI_Comm_free(&hA->comm); }

952:   MatStashDestroy_Private(&A->stash);

954:   PetscFree(hA->array);

956:   PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);
957:   PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);
958:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);
959:   PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);
960:   PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);
961:   PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);
962:   PetscFree(A->data);
963:   return(0);
964: }

966: /* Default setup: preallocate with default per-row sizes (see
     MatHYPRESetPreallocation_HYPRE for what PETSC_DEFAULT expands to). */
     static PetscErrorCode MatSetUp_HYPRE(Mat A)
967: {

971:   MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);
972:   return(0);
973: }


976: /* Finish assembly: drain the off-process stash into the matrix row by row,
     call HYPRE_IJMatrixAssemble, and (first time only) create the cached
     hypre work vectors used by the matvec kernel. */
     static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
977: {
978:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
979:   Vec                x,b;
980:   PetscMPIInt        n;
981:   PetscInt           i,j,rstart,ncols,flg;
982:   PetscInt           *row,*col;
983:   PetscScalar        *val;
984:   PetscErrorCode     ierr;

987:   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

989:   if (!A->nooffprocentries) {
990:     while (1) {
991:       MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);
992:       if (!flg) break;

994:       for (i=0; i<n; ) {
995:         /* Now identify the consecutive vals belonging to the same row */
996:         for (j=i,rstart=row[j]; j<n; j++) {
997:           if (row[j] != rstart) break;
998:         }
999:         if (j < n) ncols = j-i;
1000:        else       ncols = n-i;
1001:         /* Now assemble all these values with a single function call */
1002:         MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);

1004:         i = j;
1005:       }
1006:     }
1007:     MatStashScatterEnd_Private(&A->stash);
1008:   }

1010:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
     /* work vectors already created on a previous assembly: nothing more to do */
1011:   if (hA->x) return(0);
1012:   PetscLayoutSetUp(A->rmap);
1013:   PetscLayoutSetUp(A->cmap);
     /* the NULL-array Vecs exist only to carry layouts into VecHYPRE_IJVectorCreate */
1014:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);
1015:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);
1016:   VecHYPRE_IJVectorCreate(x,&hA->x);
1017:   VecHYPRE_IJVectorCreate(b,&hA->b);
1018:   VecDestroy(&x);
1019:   VecDestroy(&b);
1020:   return(0);
1021: }

1023: /* Hand out the per-matrix scratch buffer, growing it to at least `size`
      bytes.  Single-checkout protocol: `available` guards against nested use;
      the buffer must be released with MatRestoreArray_HYPRE. */
      static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1024: {
1025:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1026:   PetscErrorCode     ierr;

1029:   if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");

1031:   if (hA->size >= size) *array = hA->array;
1032:   else {
1033:     PetscFree(hA->array);
1034:     hA->size = size;
1035:     PetscMalloc(hA->size,&hA->array); /* size is in bytes, hence raw PetscMalloc */
1036:     *array = hA->array;
1037:   }

1039:   hA->available = PETSC_FALSE;
1040:   return(0);
1041: }

1043: /* Release the scratch buffer checked out by MatGetArray_HYPRE.
      The memory itself is kept for reuse and freed in MatDestroy_HYPRE. */
      static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1044: {
1045:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;

1048:   *array = NULL;
1049:   hA->available = PETSC_TRUE;
1050:   return(0);
1051: }


1054: /* MatSetValues for MATHYPRE.  Negative column indices are compacted out
      (cscr[0] holds the surviving columns, cscr[1] their original positions so
      the matching values can be gathered into sscr); negative row indices are
      skipped.  ADD_VALUES goes straight to HYPRE_IJMatrixAddToValues; for
      INSERT_VALUES, off-process rows are stashed for MatAssemblyEnd_HYPRE and
      local rows go to HYPRE_IJMatrixSetValues.  Scratch space comes from the
      matrix's single-checkout buffer (MatGetArray_HYPRE). */
      PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1055: {
1056:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1057:   PetscScalar        *vals = (PetscScalar *)v; /* const cast; v is only read */
1058:   PetscScalar        *sscr;
1059:   PetscInt           *cscr[2];
1060:   PetscInt           i,nzc;
1061:   void               *array = NULL;
1062:   PetscErrorCode     ierr;

      /* scratch layout: [nc cols | nc positions | nc*nr scalars] */
1065:   MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(PetscScalar)*nc*nr,&array);
1066:   cscr[0] = (PetscInt*)array;
1067:   cscr[1] = ((PetscInt*)array)+nc;
1068:   sscr = (PetscScalar*)(((PetscInt*)array)+nc*2);
1069:   for (i=0,nzc=0;i<nc;i++) {
1070:     if (cols[i] >= 0) {
1071:       cscr[0][nzc  ] = cols[i];
1072:       cscr[1][nzc++] = i;
1073:     }
1074:   }
1075:   if (!nzc) {
1076:     MatRestoreArray_HYPRE(A,&array);
1077:     return(0);
1078:   }

1080:   if (ins == ADD_VALUES) {
1081:     for (i=0;i<nr;i++) {
1082:       if (rows[i] >= 0 && nzc) {
1083:         PetscInt j;
      /* gather this row's values for the surviving columns */
1084:         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
      /* NOTE(review): the (HYPRE_Int*) casts assume PetscInt and HYPRE_Int
         have the same size — presumably guaranteed by the build configuration */
1085:         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
1086:       }
1087:       vals += nc;
1088:     }
1089:   } else { /* INSERT_VALUES */

1091:     PetscInt rst,ren;
1092:     MatGetOwnershipRange(A,&rst,&ren);

1094:     for (i=0;i<nr;i++) {
1095:       if (rows[i] >= 0 && nzc) {
1096:         PetscInt j;
1097:         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1098:         /* nonlocal values */
1099:         if (rows[i] < rst || rows[i] >= ren) { MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],sscr, PETSC_FALSE); }
1100:         /* local values */
1101:         else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,(HYPRE_Int*)&nzc,(HYPRE_Int*)(rows+i),(HYPRE_Int*)cscr[0],sscr));
1102:       }
1103:       vals += nc;
1104:     }
1105:   }

1107:   MatRestoreArray_HYPRE(A,&array);
1108:   return(0);
1109: }

1111: /* Type-specific preallocation for MATHYPRE.
      Creates the underlying HYPRE_IJMatrix on first call (or validates its
      local range on subsequent calls) and passes per-row diagonal and
      off-diagonal nonzero counts to hypre.  When dnnz/onnz are NULL the scalar
      defaults dnz/onz are replicated per row; PETSC_DEFAULT/PETSC_DECIDE
      expand to 10*bs.  On a single process only row sizes are set. */
      static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1112: {
1113:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1114:   HYPRE_Int          *hdnnz,*honnz;
1115:   PetscInt           i,rs,re,cs,ce,bs;
1116:   PetscMPIInt        size;
1117:   PetscErrorCode     ierr;

1120:   MatGetBlockSize(A,&bs);
1121:   PetscLayoutSetUp(A->rmap);
1122:   PetscLayoutSetUp(A->cmap);
1123:   rs   = A->rmap->rstart;
1124:   re   = A->rmap->rend;
1125:   cs   = A->cmap->rstart;
1126:   ce   = A->cmap->rend;
1127:   if (!hA->ij) {
1128:     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1129:     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1130:   } else {
1131:     HYPRE_Int hrs,hre,hcs,hce;
1132:     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
      /* fixed: final conversion was %d, which misprints re/ce (PetscInt) with
         64-bit indices; %D is PETSc's PetscInt specifier */
1133:     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
1134:     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
1135:   }
1136:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));

1138:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1139:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

1141:   if (!dnnz) {
1142:     PetscMalloc1(A->rmap->n,&hdnnz);
1143:     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1144:   } else {
      /* NOTE(review): cast assumes sizeof(PetscInt) == sizeof(HYPRE_Int) —
         presumably enforced by the build configuration; verify */
1145:     hdnnz = (HYPRE_Int*)dnnz;
1146:   }
1147:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1148:   if (size > 1) {
1149:     if (!onnz) {
1150:       PetscMalloc1(A->rmap->n,&honnz);
1151:       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1152:     } else {
1153:       honnz = (HYPRE_Int*)onnz;
1154:     }
1155:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1156:   } else {
1157:     honnz = NULL;
1158:     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1159:   }
      /* free only the arrays we allocated ourselves */
1160:   if (!dnnz) {
1161:     PetscFree(hdnnz);
1162:   }
1163:   if (!onnz && honnz) {
1164:     PetscFree(honnz);
1165:   }
1166:   A->preallocated = PETSC_TRUE;

1168:   /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
1169:   {
1170:     hypre_AuxParCSRMatrix *aux_matrix;
1171:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1172:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1173:   }
1174:   return(0);
1175: }

1177: /*@C
1178:    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1180:    Collective on Mat

1182:    Input Parameters:
1183: +  A - the matrix
1184: .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1185:           (same value is used for all local rows)
1186: .  dnnz - array containing the number of nonzeros in the various rows of the
1187:           DIAGONAL portion of the local submatrix (possibly different for each row)
1188:           or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure.
1189:           The size of this array is equal to the number of local rows, i.e 'm'.
1190:           For matrices that will be factored, you must leave room for (and set)
1191:           the diagonal entry even if it is zero.
1192: .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1193:           submatrix (same value is used for all local rows).
1194: -  onnz - array containing the number of nonzeros in the various rows of the
1195:           OFF-DIAGONAL portion of the local submatrix (possibly different for
1196:           each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero
1197:           structure. The size of this array is equal to the number
1198:           of local rows, i.e 'm'.

1200:    Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

1202:    Level: intermediate

1204: .keywords: matrix, aij, compressed row, sparse, parallel

1206: .seealso: MatCreate(), MatMPIAIJSetPreallocation, MATHYPRE
1207: @*/
1208: /* Public entry point; dispatches to the composed type-specific
      implementation (no-op for types that do not provide one). */
      PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1209: {

1215:   PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));
1216:   return(0);
1217: }

1219: /*
1220:    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

1222:    Collective

1224:    Input Parameters:
1225: +  vparcsr  - the pointer to the hypre_ParCSRMatrix
1226: .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1227: -  copymode - PETSc copying options

1229:    Output Parameter:
1230: .  A  - the matrix

1232:    Level: intermediate

1234: .seealso: MatHYPRE, PetscCopyMode
1235: */
1236: /* Wrap an existing hypre_ParCSRMatrix in a PETSc Mat of type MATAIJ,
      MATIS or MATHYPRE, honouring the requested PetscCopyMode.
      A MATHYPRE result shares the ParCSR directly (PETSC_COPY_VALUES is
      unsupported there); AIJ/IS results are produced by conversion. */
      PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
1237: {
1238:   Mat                   T;
1239:   Mat_HYPRE             *hA;
1240:   hypre_ParCSRMatrix    *parcsr;
1241:   MPI_Comm              comm;
1242:   PetscInt              rstart,rend,cstart,cend,M,N;
1243:   PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
1244:   PetscErrorCode        ierr;

1247:   parcsr = (hypre_ParCSRMatrix *)vparcsr;
1248:   comm   = hypre_ParCSRMatrixComm(parcsr);
1249:   PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);
1250:   PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);
1251:   PetscStrcmp(mtype,MATAIJ,&isaij);
1252:   PetscStrcmp(mtype,MATHYPRE,&ishyp);
1253:   PetscStrcmp(mtype,MATIS,&isis);
1254:   isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
1255:   if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
1256:   if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

1258:   /* access ParCSRMatrix */
1259:   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1260:   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1261:   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1262:   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1263:   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1264:   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

1266:   /* fix for empty local rows/columns */
1267:   if (rend < rstart) rend = rstart;
1268:   if (cend < cstart) cend = cstart;

1270:   /* create PETSc matrix with MatHYPRE */
1271:   MatCreate(comm,&T);
1272:   MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);
1273:   MatSetType(T,MATHYPRE);
1274:   hA   = (Mat_HYPRE*)(T->data);

1276:   /* create HYPRE_IJMatrix */
1277:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

1279:   /* set ParCSR object */
1280:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
      /* attach the user's ParCSR as the IJ matrix's object (no copy) */
1281:   hypre_IJMatrixObject(hA->ij) = parcsr;
1282:   T->preallocated = PETSC_TRUE;

1284:   /* set assembled flag */
1285:   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1286:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1287:   if (ishyp) {
1288:     PetscMPIInt myid = 0;

1290:     /* make sure we always have row_starts and col_starts available */
1291:     if (HYPRE_AssumedPartitionCheck()) {
1292:       MPI_Comm_rank(comm,&myid);
1293:     }
1294:     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1295:       PetscLayout map;

      /* point hypre at the PETSc layout ranges instead of allocating its own */
1297:       MatGetLayouts(T,NULL,&map);
1298:       PetscLayoutSetUp(map);
1299:       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
1300:     }
1301:     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1302:       PetscLayout map;

1304:       MatGetLayouts(T,&map,NULL);
1305:       PetscLayoutSetUp(map);
1306:       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
1307:     }
1308:     /* prevent from freeing the pointer */
1309:     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1310:     *A   = T;
1311:     MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
1312:     MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
1313:   } else if (isaij) {
1314:     if (copymode != PETSC_OWN_POINTER) {
1315:       /* prevent from freeing the pointer */
1316:       hA->inner_free = PETSC_FALSE;
1317:       MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);
1318:       MatDestroy(&T);
1319:     } else { /* AIJ return type with PETSC_OWN_POINTER */
1320:       MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);
1321:       *A   = T;
1322:     }
1323:   } else if (isis) {
1324:     MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);
1325:     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1326:     MatDestroy(&T);
1327:   }
1328:   return(0);
1329: }

1331: /* Type-specific accessor: return the internal ParCSR (borrowed pointer,
      valid only while the Mat is alive).  Errors out unless the IJ matrix
      exists and is of HYPRE_PARCSR type. */
      static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1332: {
1333:   Mat_HYPRE*            hA = (Mat_HYPRE*)A->data;
1334:   HYPRE_Int             type;
1335:   PetscErrorCode        ierr;

1338:   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1339:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1340:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1341:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1342:   return(0);
1343: }

1345: /*
1346:    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1348:    Not collective

1350:    Input Parameters:
1351: +  A  - the MATHYPRE object

1353:    Output Parameter:
1354: .  parcsr  - the pointer to the hypre_ParCSRMatrix

1356:    Level: intermediate

1358: .seealso: MatHYPRE, PetscCopyMode
1359: */
1360: /* Public entry point; requires the type to provide the composed method
      (PetscUseMethod errors out otherwise, unlike PetscTryMethod). */
      PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1361: {

1367:   PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));
1368:   return(0);
1369: }

1371: /* Report whether any local row of the diagonal block lacks an explicitly
      stored diagonal entry; *dd (if requested) gets the global index of the
      first such row, or -1 if none is missing. */
      static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1372: {
1373:   hypre_ParCSRMatrix *parcsr;
1374:   hypre_CSRMatrix    *ha;
1375:   PetscInt           rst;
1376:   PetscErrorCode     ierr;

1379:   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1380:   MatGetOwnershipRange(A,&rst,NULL);
1381:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1382:   if (missing) *missing = PETSC_FALSE;
1383:   if (dd) *dd = -1;
1384:   ha = hypre_ParCSRMatrixDiag(parcsr);
1385:   if (ha) {
1386:     PetscInt  size,i;
1387:     HYPRE_Int *ii,*jj;

1389:     size = hypre_CSRMatrixNumRows(ha);
1390:     ii   = hypre_CSRMatrixI(ha);
1391:     jj   = hypre_CSRMatrixJ(ha);
1392:     for (i = 0; i < size; i++) {
1393:       PetscInt  j;
1394:       PetscBool found = PETSC_FALSE;

      /* scan row i of the (local-index) diagonal CSR block for column i */
1396:       for (j = ii[i]; j < ii[i+1] && !found; j++)
1397:         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

1399:       if (!found) {
1400:         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1401:         if (missing) *missing = PETSC_TRUE;
1402:         if (dd) *dd = i+rst;
      /* first missing entry is enough: return immediately */
1403:         return(0);
1404:       }
1405:     }
1406:     if (!size) {
1407:       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1408:       if (missing) *missing = PETSC_TRUE;
1409:       if (dd) *dd = rst;
1410:     }
1411:   } else {
1412:     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1413:     if (missing) *missing = PETSC_TRUE;
1414:     if (dd) *dd = rst;
1415:   }
1416:   return(0);
1417: }

1419: /* A *= s: scale every stored value of both the diagonal and off-diagonal
      CSR blocks of the underlying ParCSR matrix in place. */
      static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1420: {
1421:   hypre_ParCSRMatrix *parcsr;
1422:   hypre_CSRMatrix    *ha;
1423:   PetscErrorCode     ierr;

1426:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1427:   /* diagonal part */
1428:   ha = hypre_ParCSRMatrixDiag(parcsr);
1429:   if (ha) {
1430:     PetscInt    size,i;
1431:     HYPRE_Int   *ii;
1432:     PetscScalar *a;

1434:     size = hypre_CSRMatrixNumRows(ha);
1435:     a    = hypre_CSRMatrixData(ha);
1436:     ii   = hypre_CSRMatrixI(ha);
      /* ii[size] is the total number of stored entries in this CSR block */
1437:     for (i = 0; i < ii[size]; i++) a[i] *= s;
1438:   }
1439:   /* offdiagonal part */
1440:   ha = hypre_ParCSRMatrixOffd(parcsr);
1441:   if (ha) {
1442:     PetscInt    size,i;
1443:     HYPRE_Int   *ii;
1444:     PetscScalar *a;

1446:     size = hypre_CSRMatrixNumRows(ha);
1447:     a    = hypre_CSRMatrixData(ha);
1448:     ii   = hypre_CSRMatrixI(ha);
1449:     for (i = 0; i < ii[size]; i++) a[i] *= s;
1450:   }
1451:   return(0);
1452: }

1454: /* Zero the listed rows AND columns via hypre's eliminate routine.
      Only locally owned rows are accepted (global indices are converted to
      local before the hypre call).  Modifying x/b is not implemented, and
      NOTE(review): the diag argument is not passed to hypre here — confirm
      hypre_ParCSRMatrixEliminateRowsCols's treatment of the diagonal matches
      what callers expect for diag != 0. */
      static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1455: {
1456:   hypre_ParCSRMatrix *parcsr;
1457:   HYPRE_Int          *lrows;
1458:   PetscInt           rst,ren,i;
1459:   PetscErrorCode     ierr;

1462:   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1463:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1464:   PetscMalloc1(numRows,&lrows);
1465:   MatGetOwnershipRange(A,&rst,&ren);
1466:   for (i=0;i<numRows;i++) {
1467:     if (rows[i] < rst || rows[i] >= ren) {
      /* fixed: free the work array before raising, so the error path does not leak */
      PetscFree(lrows);
1468:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
    }
1469:     lrows[i] = rows[i] - rst;
1470:   }
1471:   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1472:   PetscFree(lrows);
1473:   return(0);
1474: }

1476: /* Helper: zero every stored value of one hypre CSR block (sparsity pattern
      is kept).  Safe to call with ha == NULL or with no data array. */
      static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1477: {
1478:   PetscErrorCode      ierr;

1481:   if (ha) {
1482:     HYPRE_Int     *ii, size;
1483:     HYPRE_Complex *a;

1485:     size = hypre_CSRMatrixNumRows(ha);
1486:     a    = hypre_CSRMatrixData(ha);
1487:     ii   = hypre_CSRMatrixI(ha);

      /* ii[size] is the total number of stored entries */
1489:     if (a) { PetscMemzero(a,(ii[size])*sizeof(HYPRE_Complex)); }
1490:   }
1491:   return(0);
1492: }

1494: /* MatZeroEntries for MATHYPRE: zero the values of both CSR blocks while
      keeping the preallocated sparsity pattern. */
      PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1495: {
1496:   hypre_ParCSRMatrix  *parcsr;
1497:   PetscErrorCode      ierr;

1500:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1501:   /* diagonal part */
1502:   MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));
1503:   /* off-diagonal part */
1504:   MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));
1505:   return(0);
1506: }


1509: /* Helper: in one hypre CSR block, zero the listed (local-index) rows,
      writing `diag` into any entry whose column index equals the row index.
      For the off-diagonal block callers pass diag = 0.0, since there the
      column indices are local offd indices and a match is not a true diagonal. */
      static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],PetscScalar diag)
1510: {
1511:   PetscInt        ii, jj, ibeg, iend, irow;
1512:   PetscInt        *i, *j;
1513:   PetscScalar     *a;


1517:   if (!hA) return(0);

1519:   i = (PetscInt*) hypre_CSRMatrixI(hA);
1520:   j = (PetscInt*) hypre_CSRMatrixJ(hA);
1521:   a = hypre_CSRMatrixData(hA);

1523:   for (ii = 0; ii < N; ii++) {
1524:     irow = rows[ii];
1525:     ibeg = i[irow];
1526:     iend = i[irow+1];
1527:     for (jj = ibeg; jj < iend; jj++)
1528:       if (j[jj] == irow) a[jj] = diag;
1529:       else a[jj] = 0.0;
1530:    }

1532:    return(0);
1533: }

1535: /* MatZeroRows for MATHYPRE: zero the locally owned subset of the requested
      rows in both CSR blocks, placing `diag` on the true diagonal (diagonal
      block only).  Updating x/b is not supported. */
      PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1536: {
1537:   hypre_ParCSRMatrix  *parcsr;
1538:   PetscInt            *lrows,len;
1539:   PetscErrorCode      ierr;

1542:   if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size \n");
1543:   /* retrieve the internal matrix */
1544:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1545:   /* get locally owned rows */
1546:   MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);
1547:   /* zero diagonal part */
1548:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,diag);
1549:   /* zero off-diagonal part */
1550:   MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);

1552:   PetscFree(lrows);
1553:   return(0);
1554: }


1557: /* Begin assembly: start communicating stashed off-process entries; a no-op
      when the user guaranteed there are none. */
      PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1558: {

1562:   if (mat->nooffprocentries) return(0);

1564:   MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);
1565:   return(0);
1566: }

1568: /* MatGetRow for MATHYPRE: forward to HYPRE_ParCSRMatrixGetRow.  Must be
      paired with MatRestoreRow_HYPRE; hypre owns the returned idx/v arrays. */
      PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1569: {
1570:   hypre_ParCSRMatrix  *parcsr;
1571:   PetscErrorCode      ierr;

1574:   /* retrieve the internal matrix */
1575:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1576:   /* call HYPRE API */
1577:   PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
1578:   return(0);
1579: }

1581: /* Counterpart of MatGetRow_HYPRE: release hypre's row access. */
      PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1582: {
1583:   hypre_ParCSRMatrix  *parcsr;
1584:   PetscErrorCode      ierr;

1587:   /* retrieve the internal matrix */
1588:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1589:   /* call HYPRE API */
1590:   PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,(HYPRE_Int*)nz,(HYPRE_Int**)idx,v));
1591:   return(0);
1592: }



1596: /* MatGetValues for MATHYPRE: fetch one row at a time through
      HYPRE_IJMatrixGetValues, skipping negative row indices.
      NOTE(review): A->data is a Mat_HYPRE*, so this cast is valid only if the
      HYPRE_IJMatrix is the first member of Mat_HYPRE — fragile; using
      ((Mat_HYPRE*)A->data)->ij as elsewhere in this file would be safer;
      confirm against mhypre.h. */
      PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1597: {
1598:   HYPRE_IJMatrix     *hIJ = (HYPRE_IJMatrix*)A->data;
1599:   PetscErrorCode      ierr;
1600:   PetscInt            i;
1602:   if (!m || !n) return(0);

1604:   /* Ignore negative row indices
1605:    * And negative column indices should be automatically ignored in hypre
1606:    * */
1607:   for (i=0; i<m; i++)
1608:     if (idxm[i] >= 0) PetscStackCallStandard(HYPRE_IJMatrixGetValues,(*hIJ,1,(HYPRE_Int*)&n,(HYPRE_Int*)&idxm[i],(HYPRE_Int*)idxn,&v[i*n]));

1610:   return(0);
1611: }


1614: /*MC
1615:    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1616:           based on the hypre IJ interface.

1618:    Level: intermediate

1620: .seealso: MatCreate()
1621: M*/

1623: /* Constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
      function table, creates the stash for off-process entries, duplicates the
      communicator for hypre's use, and composes the type-specific methods. */
      PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
1624: {
1625:   Mat_HYPRE      *hB;

1629:   PetscNewLog(B,&hB);
1630:   hB->inner_free = PETSC_TRUE;  /* by default we own the inner hypre objects */
1631:   hB->available  = PETSC_TRUE;  /* scratch buffer not checked out */
1632:   hB->size       = 0;
1633:   hB->array      = NULL;

1635:   B->data       = (void*)hB;
1636:   B->rmap->bs   = 1;
1637:   B->assembled  = PETSC_FALSE;

1639:   PetscMemzero(B->ops,sizeof(struct _MatOps));
1640:   B->ops->mult            = MatMult_HYPRE;
1641:   B->ops->multtranspose   = MatMultTranspose_HYPRE;
1642:   B->ops->setup           = MatSetUp_HYPRE;
1643:   B->ops->destroy         = MatDestroy_HYPRE;

1645:   /* build cache for off array entries formed */
1646:   MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);
1647:   B->ops->assemblyend     = MatAssemblyEnd_HYPRE;
1648:   B->ops->assemblybegin   = MatAssemblyBegin_HYPRE;

1650:   B->ops->ptap            = MatPtAP_HYPRE_HYPRE;
1651:   B->ops->matmult         = MatMatMult_HYPRE_HYPRE;
1652:   B->ops->setvalues       = MatSetValues_HYPRE;
1653:   B->ops->missingdiagonal = MatMissingDiagonal_HYPRE;
1654:   B->ops->scale           = MatScale_HYPRE;
1655:   B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE;
1656:   B->ops->zeroentries     = MatZeroEntries_HYPRE;
1657:   B->ops->zerorows        = MatZeroRows_HYPRE;
1658:   B->ops->getrow          = MatGetRow_HYPRE;
1659:   B->ops->restorerow      = MatRestoreRow_HYPRE;
1660:   B->ops->getvalues       = MatGetValues_HYPRE;

      /* private duplicate of the communicator, handed to hypre; freed in destroy */
1662:   MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);
1663:   PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);
1664:   PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);
1665:   PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);
1666:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);
1667:   PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);
1668:   PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);
1669:   PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);
1670:   return(0);
1671: }

1673: /* Destructor callback for arrays allocated by hypre: releases host memory
      through hypre's own allocator. */
      static PetscErrorCode hypre_array_destroy(void *ptr)
1674: {
1676:    hypre_TFree(ptr,HYPRE_MEMORY_HOST);
1677:    return(0);
1678: }