/*
   Actual source code: mhypre.c
   petsc-3.8.4 2018-03-24
   Report Typos and Errors
*/
  2: /*
  3:     Creates hypre ijmatrix from PETSc matrix
  4: */

  6:  #include <petscmathypre.h>
  7:  #include <petsc/private/matimpl.h>
  8:  #include <../src/mat/impls/hypre/mhypre.h>
  9:  #include <../src/mat/impls/aij/mpi/mpiaij.h>
 10:  #include <../src/vec/vec/impls/hypre/vhyp.h>
 11: #include <HYPRE.h>
 12: #include <HYPRE_utilities.h>
 13: #include <_hypre_parcsr_ls.h>
 14: #include <_hypre_sstruct_ls.h>

/* Forward declarations of file-local helpers.  MatHYPRE_MultKernel_Private and
   hypre_array_destroy are defined later in the file, outside this excerpt. */
 16: static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
 17: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
 18: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
 19: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
 20: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,Vec,Vec,PetscBool);
 21: static PetscErrorCode hypre_array_destroy(void*);

 23: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 24: {
 26:   PetscInt       i,n_d,n_o;
 27:   const PetscInt *ia_d,*ia_o;
 28:   PetscBool      done_d=PETSC_FALSE,done_o=PETSC_FALSE;
 29:   PetscInt       *nnz_d=NULL,*nnz_o=NULL;

 32:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 33:     MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
 34:     if (done_d) {
 35:       PetscMalloc1(n_d,&nnz_d);
 36:       for (i=0; i<n_d; i++) {
 37:         nnz_d[i] = ia_d[i+1] - ia_d[i];
 38:       }
 39:     }
 40:     MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
 41:   }
 42:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 43:     MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 44:     if (done_o) {
 45:       PetscMalloc1(n_o,&nnz_o);
 46:       for (i=0; i<n_o; i++) {
 47:         nnz_o[i] = ia_o[i+1] - ia_o[i];
 48:       }
 49:     }
 50:     MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
 51:   }
 52:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 53:     if (!done_o) { /* only diagonal part */
 54:       PetscMalloc1(n_d,&nnz_o);
 55:       for (i=0; i<n_d; i++) {
 56:         nnz_o[i] = 0;
 57:       }
 58:     }
 59:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,(HYPRE_Int *)nnz_d,(HYPRE_Int *)nnz_o));
 60:     PetscFree(nnz_d);
 61:     PetscFree(nnz_o);
 62:   }
 63:   return(0);
 64: }

 66: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
 67: {
 69:   PetscInt       rstart,rend,cstart,cend;

 72:   PetscLayoutSetUp(A->rmap);
 73:   PetscLayoutSetUp(A->cmap);
 74:   rstart = A->rmap->rstart;
 75:   rend   = A->rmap->rend;
 76:   cstart = A->cmap->rstart;
 77:   cend   = A->cmap->rend;
 78:   PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
 79:   PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
 80:   {
 81:     PetscBool      same;
 82:     Mat            A_d,A_o;
 83:     const PetscInt *colmap;
 84:     PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&same);
 85:     if (same) {
 86:       MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
 87:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
 88:       return(0);
 89:     }
 90:     PetscObjectTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
 91:     if (same) {
 92:       MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
 93:       MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
 94:       return(0);
 95:     }
 96:     PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&same);
 97:     if (same) {
 98:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
 99:       return(0);
100:     }
101:     PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
102:     if (same) {
103:       MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
104:       return(0);
105:     }
106:   }
107:   return(0);
108: }

/*
   Copy the values of the PETSc matrix A into the (already created) HYPRE IJ
   matrix ij.  Square MPIAIJ matrices and SeqAIJ matrices take fast paths that
   write directly into the hypre data structures; all other types fall back to
   a generic row-by-row copy through HYPRE_IJMatrixSetValues().
*/
110: static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
111: {
112:   PetscErrorCode    ierr;
113:   PetscInt          i,rstart,rend,ncols,nr,nc;
114:   const PetscScalar *values;
115:   const PetscInt    *cols;
116:   PetscBool         flg;

119:   PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
120:   MatGetSize(A,&nr,&nc);
     /* the MPIAIJ fast path is only taken for square matrices (see the
        cstart comment in MatHYPRE_IJMatrixFastCopy_MPIAIJ) */
121:   if (flg && nr == nc) {
122:     MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
123:     return(0);
124:   }
125:   PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
126:   if (flg) {
127:     MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
128:     return(0);
129:   }

     /* generic path: insert each locally owned row individually.
        NOTE(review): the (HYPRE_Int*) casts on PetscInt pointers assume the
        two integer types have the same size -- TODO confirm against the
        hypre/PETSc configuration */
131:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
132:   MatGetOwnershipRange(A,&rstart,&rend);
133:   for (i=rstart; i<rend; i++) {
134:     MatGetRow(A,i,&ncols,&cols,&values);
135:     if (ncols) {
136:       PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,(HYPRE_Int *)&ncols,(HYPRE_Int *)&i,(HYPRE_Int *)cols,values));
137:     }
138:     MatRestoreRow(A,i,&ncols,&cols,&values);
139:   }
140:   return(0);
141: }

/*
   Fast copy of a SeqAIJ matrix: memcpy the CSR arrays (i, j, a) of A straight
   into the diagonal block of the hypre ParCSR matrix behind ij.  Requires the
   IJ matrix to be of type HYPRE_PARCSR.
*/
143: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
144: {
145:   PetscErrorCode        ierr;
146:   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ*)A->data;
147:   HYPRE_Int             type;
148:   hypre_ParCSRMatrix    *par_matrix;
149:   hypre_AuxParCSRMatrix *aux_matrix;
150:   hypre_CSRMatrix       *hdiag;

153:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
154:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
155:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
156:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
157:   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
158:   /*
159:        this is the Hack part where we monkey directly with the hypre datastructures
160:   */
     /* NOTE(review): the memcpys assume HYPRE_Int/PetscInt and the hypre data
        type/PetscScalar have identical sizes -- TODO confirm for this build */
161:   PetscMemcpy(hdiag->i,pdiag->i,(A->rmap->n + 1)*sizeof(PetscInt));
162:   PetscMemcpy(hdiag->j,pdiag->j,pdiag->nz*sizeof(PetscInt));
163:   PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));

     /* data is already in place: tell hypre not to build its auxiliary matrix */
165:   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
166:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
167:   return(0);
168: }

/*
   Fast copy of an MPIAIJ matrix: copy the diagonal and off-diagonal CSR
   blocks of A directly into the hypre ParCSR matrix behind ij, translating
   column indices back to global numbering (diag: shift by cstart; offd: map
   through garray) as hypre expects.  Only valid for square MPIAIJ matrices
   with the usual layout (see the cstart comment below).
*/
170: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
171: {
172:   PetscErrorCode        ierr;
173:   Mat_MPIAIJ            *pA = (Mat_MPIAIJ*)A->data;
174:   Mat_SeqAIJ            *pdiag,*poffd;
175:   PetscInt              i,*garray = pA->garray,*jj,cstart,*pjj;
176:   HYPRE_Int             type;
177:   hypre_ParCSRMatrix    *par_matrix;
178:   hypre_AuxParCSRMatrix *aux_matrix;
179:   hypre_CSRMatrix       *hdiag,*hoffd;

182:   pdiag = (Mat_SeqAIJ*) pA->A->data;
183:   poffd = (Mat_SeqAIJ*) pA->B->data;
184:   /* cstart is only valid for square MPIAIJ layed out in the usual way */
185:   MatGetOwnershipRange(A,&cstart,NULL);

187:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
188:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
189:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
190:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
191:   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
192:   hoffd = hypre_ParCSRMatrixOffd(par_matrix);

194:   /*
195:        this is the Hack part where we monkey directly with the hypre datastructures
196:   */
197:   PetscMemcpy(hdiag->i,pdiag->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
198:   /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
199:   jj  = (PetscInt*)hdiag->j;
200:   pjj = (PetscInt*)pdiag->j;
201:   for (i=0; i<pdiag->nz; i++) jj[i] = cstart + pjj[i];
202:   PetscMemcpy(hdiag->data,pdiag->a,pdiag->nz*sizeof(PetscScalar));
203:   PetscMemcpy(hoffd->i,poffd->i,(pA->A->rmap->n + 1)*sizeof(PetscInt));
204:   /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
205:      If we hacked a hypre a bit more we might be able to avoid this step */
206:   jj  = (PetscInt*) hoffd->j;
207:   pjj = (PetscInt*) poffd->j;
208:   for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
209:   PetscMemcpy(hoffd->data,poffd->a,poffd->nz*sizeof(PetscScalar));

     /* everything is filled in already: suppress hypre's auxiliary matrix */
211:   aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
212:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
213:   return(0);
214: }

/*
   Convert a MATHYPRE matrix A to MATIS.

   Builds row/column local-to-global maps from the ParCSR layout (columns:
   diagonal range first, then the off-diagonal column map), then merges the
   diagonal and off-diagonal hypre CSR blocks into one local SeqAIJ matrix,
   sorting the column indices of each row.  Supports MAT_INITIAL_MATRIX,
   MAT_REUSE_MATRIX (reuses the CSR of the existing local matrix) and
   MAT_INPLACE_MATRIX (replaces A's header at the end).
*/
216: static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
217: {
218:   Mat_HYPRE*             mhA = (Mat_HYPRE*)(A->data);
219:   Mat                    lA;
220:   ISLocalToGlobalMapping rl2g,cl2g;
221:   IS                     is;
222:   hypre_ParCSRMatrix     *hA;
223:   hypre_CSRMatrix        *hdiag,*hoffd;
224:   MPI_Comm               comm;
225:   PetscScalar            *hdd,*hod,*aa,*data;
226:   HYPRE_Int              *col_map_offd,*hdi,*hdj,*hoi,*hoj;
227:   PetscInt               *ii,*jj,*iptr,*jptr;
228:   PetscInt               cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
229:   HYPRE_Int              type;
230:   PetscErrorCode         ierr;

233:   comm = PetscObjectComm((PetscObject)A);
234:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
235:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
236:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
     /* gather sizes, offsets and raw CSR pointers of both local blocks */
237:   M     = hypre_ParCSRMatrixGlobalNumRows(hA);
238:   N     = hypre_ParCSRMatrixGlobalNumCols(hA);
239:   str   = hypre_ParCSRMatrixFirstRowIndex(hA);
240:   stc   = hypre_ParCSRMatrixFirstColDiag(hA);
241:   hdiag = hypre_ParCSRMatrixDiag(hA);
242:   hoffd = hypre_ParCSRMatrixOffd(hA);
243:   dr    = hypre_CSRMatrixNumRows(hdiag);
244:   dc    = hypre_CSRMatrixNumCols(hdiag);
245:   nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
246:   hdi   = hypre_CSRMatrixI(hdiag);
247:   hdj   = hypre_CSRMatrixJ(hdiag);
248:   hdd   = hypre_CSRMatrixData(hdiag);
249:   oc    = hypre_CSRMatrixNumCols(hoffd);
250:   nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
251:   hoi   = hypre_CSRMatrixI(hoffd);
252:   hoj   = hypre_CSRMatrixJ(hoffd);
253:   hod   = hypre_CSRMatrixData(hoffd);
254:   if (reuse != MAT_REUSE_MATRIX) {
255:     PetscInt *aux;

257:     /* generate l2g maps for rows and cols */
258:     ISCreateStride(comm,dr,str,1,&is);
259:     ISLocalToGlobalMappingCreateIS(is,&rl2g);
260:     ISDestroy(&is);
261:     col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
262:     PetscMalloc1(dc+oc,&aux);
     /* local columns: [0,dc) map to the diagonal range, [dc,dc+oc) to the
        off-diagonal global columns */
263:     for (i=0; i<dc; i++) aux[i] = i+stc;
264:     for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
265:     ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
266:     ISLocalToGlobalMappingCreateIS(is,&cl2g);
267:     ISDestroy(&is);
268:     /* create MATIS object */
269:     MatCreate(comm,B);
270:     MatSetSizes(*B,dr,dc,M,N);
271:     MatSetType(*B,MATIS);
272:     MatSetLocalToGlobalMapping(*B,rl2g,cl2g);
273:     ISLocalToGlobalMappingDestroy(&rl2g);
274:     ISLocalToGlobalMappingDestroy(&cl2g);

276:     /* allocate CSR for local matrix */
277:     PetscMalloc1(dr+1,&iptr);
278:     PetscMalloc1(nnz,&jptr);
279:     PetscMalloc1(nnz,&data);
280:   } else {
281:     PetscInt  nr;
282:     PetscBool done;
     /* reuse: write into the CSR of the existing local matrix after checks */
283:     MatISGetLocalMat(*B,&lA);
284:     MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);
285:     if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
286:     if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
287:     MatSeqAIJGetArray(lA,&data);
288:   }
289:   /* merge local matrices */
     /* walk diag and offd CSRs in lockstep, row by row; offd columns are
        shifted by dc into the local numbering, then each row is sorted */
290:   ii   = iptr;
291:   jj   = jptr;
292:   aa   = data;
293:   *ii  = *(hdi++) + *(hoi++);
294:   for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
295:     PetscScalar *aold = aa;
296:     PetscInt    *jold = jj,nc = jd+jo;
297:     for (; jd<*hdi; jd++) { *jj++ = *hdj++;      *aa++ = *hdd++; }
298:     for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
299:     *(++ii) = *(hdi++) + *(hoi++);
300:     PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);
301:   }
     /* pad the row pointers of any remaining (empty) rows */
302:   for (; cum<dr; cum++) *(++ii) = nnz;
303:   if (reuse != MAT_REUSE_MATRIX) {
304:     Mat_SeqAIJ* a;

306:     MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);
307:     MatISSetLocalMat(*B,lA);
308:     /* hack SeqAIJ */
     /* make the local matrix own (and later free) the arrays we allocated */
309:     a          = (Mat_SeqAIJ*)(lA->data);
310:     a->free_a  = PETSC_TRUE;
311:     a->free_ij = PETSC_TRUE;
312:     MatDestroy(&lA);
313:   }
314:   MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
315:   MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
316:   if (reuse == MAT_INPLACE_MATRIX) {
317:     MatHeaderReplace(A,B);
318:   }
319:   return(0);
320: }

322: PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
323: {
324:   Mat_HYPRE      *hB;
325:   MPI_Comm       comm = PetscObjectComm((PetscObject)A);

329:   if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported MAT_INPLACE_MATRIX");
330:   if (reuse == MAT_REUSE_MATRIX) {
331:     /* always destroy the old matrix and create a new memory;
332:        hope this does not churn the memory too much. The problem
333:        is I do not know if it is possible to put the matrix back to
334:        its initial state so that we can directly copy the values
335:        the second time through. */
336:     hB = (Mat_HYPRE*)((*B)->data);
337:     PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
338:   } else {
339:     MatCreate(comm,B);
340:     MatSetType(*B,MATHYPRE);
341:     MatSetSizes(*B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
342:     hB   = (Mat_HYPRE*)((*B)->data);
343:   }
344:   MatHYPRE_CreateFromMat(A,hB);
345:   MatHYPRE_IJMatrixCopy(A,hB->ij);
346:   (*B)->preallocated = PETSC_TRUE;
347:   MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
348:   MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
349:   return(0);
350: }

/*
   Convert a MATHYPRE matrix A to (MPI/Seq)AIJ.

   The diagonal and off-diagonal hypre CSR blocks are copied (or, for
   MAT_INPLACE_MATRIX, stolen) into AIJ arrays; off-diagonal column indices
   are translated to global numbering via the ParCSR column map, and each
   row's columns are sorted.  For MAT_INPLACE_MATRIX the stolen arrays are
   registered in PetscContainers so they are later released with hypre's own
   free routine (see hypre_array_destroy).
*/
352: static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
353: {
354:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
355:   hypre_ParCSRMatrix *parcsr;
356:   hypre_CSRMatrix    *hdiag,*hoffd;
357:   MPI_Comm           comm;
358:   PetscScalar        *da,*oa,*aptr;
359:   PetscInt           *dii,*djj,*oii,*ojj,*iptr;
360:   PetscInt           i,dnnz,onnz,m,n;
361:   HYPRE_Int          type;
362:   PetscMPIInt        size;
363:   PetscErrorCode     ierr;

366:   comm = PetscObjectComm((PetscObject)A);
367:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
368:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
369:   if (reuse == MAT_REUSE_MATRIX) {
370:     PetscBool ismpiaij,isseqaij;
371:     PetscObjectTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);
372:     PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);
373:     if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
374:   }
375:   MPI_Comm_size(comm,&size);

377:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
378:   hdiag = hypre_ParCSRMatrixDiag(parcsr);
379:   hoffd = hypre_ParCSRMatrixOffd(parcsr);
380:   m     = hypre_CSRMatrixNumRows(hdiag);
381:   n     = hypre_CSRMatrixNumCols(hdiag);
382:   dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
383:   onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
     /* obtain the target arrays for the diagonal block: fresh allocation,
        the CSR of the reused matrix, or (inplace) hypre's own arrays */
384:   if (reuse == MAT_INITIAL_MATRIX) {
385:     PetscMalloc1(m+1,&dii);
386:     PetscMalloc1(dnnz,&djj);
387:     PetscMalloc1(dnnz,&da);
388:   } else if (reuse == MAT_REUSE_MATRIX) {
389:     PetscInt  nr;
390:     PetscBool done;
391:     if (size > 1) {
392:       Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);

394:       MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
395:       if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
396:       if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
397:       MatSeqAIJGetArray(b->A,&da);
398:     } else {
399:       MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
400:       if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
401:       if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
402:       MatSeqAIJGetArray(*B,&da);
403:     }
404:   } else { /* MAT_INPLACE_MATRIX */
     /* steal hypre's arrays; the memcpys below then copy in place.
        NOTE(review): the (PetscInt*) casts assume HYPRE_Int and PetscInt
        are layout compatible -- TODO confirm for this configuration */
405:     dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
406:     djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
407:     da  = hypre_CSRMatrixData(hdiag);
408:   }
409:   PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
410:   PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
411:   PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
     /* sort the column indices (and matching values) within each row */
412:   iptr = djj;
413:   aptr = da;
414:   for (i=0; i<m; i++) {
415:     PetscInt nc = dii[i+1]-dii[i];
416:     PetscSortIntWithScalarArray(nc,iptr,aptr);
417:     iptr += nc;
418:     aptr += nc;
419:   }
420:   if (size > 1) {
421:     HYPRE_Int *offdj,*coffd;

423:     if (reuse == MAT_INITIAL_MATRIX) {
424:       PetscMalloc1(m+1,&oii);
425:       PetscMalloc1(onnz,&ojj);
426:       PetscMalloc1(onnz,&oa);
427:     } else if (reuse == MAT_REUSE_MATRIX) {
428:       Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
429:       PetscInt   nr,hr = hypre_CSRMatrixNumRows(hoffd);
430:       PetscBool  done;

432:       MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);
433:       if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
434:       if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
435:       MatSeqAIJGetArray(b->B,&oa);
436:     } else { /* MAT_INPLACE_MATRIX */
437:       oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
438:       ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
439:       oa  = hypre_CSRMatrixData(hoffd);
440:     }
441:     PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
     /* translate offdiag column indices to global numbering via the col map */
442:     offdj = hypre_CSRMatrixJ(hoffd);
443:     coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
444:     for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
445:     PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
446:     iptr = ojj;
447:     aptr = oa;
448:     for (i=0; i<m; i++) {
449:        PetscInt nc = oii[i+1]-oii[i];
450:        PetscSortIntWithScalarArray(nc,iptr,aptr);
451:        iptr += nc;
452:        aptr += nc;
453:     }
454:     if (reuse == MAT_INITIAL_MATRIX) {
455:       Mat_MPIAIJ *b;
456:       Mat_SeqAIJ *d,*o;

458:       MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);
459:       /* hack MPIAIJ */
     /* hand ownership of the freshly allocated arrays to the AIJ blocks */
460:       b          = (Mat_MPIAIJ*)((*B)->data);
461:       d          = (Mat_SeqAIJ*)b->A->data;
462:       o          = (Mat_SeqAIJ*)b->B->data;
463:       d->free_a  = PETSC_TRUE;
464:       d->free_ij = PETSC_TRUE;
465:       o->free_a  = PETSC_TRUE;
466:       o->free_ij = PETSC_TRUE;
467:     } else if (reuse == MAT_INPLACE_MATRIX) {
468:       Mat T;
469:       MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);
     /* detach the stolen arrays so destroying the hypre matrix keeps them alive */
470:       hypre_CSRMatrixI(hdiag)    = NULL;
471:       hypre_CSRMatrixJ(hdiag)    = NULL;
472:       hypre_CSRMatrixData(hdiag) = NULL;
473:       hypre_CSRMatrixI(hoffd)    = NULL;
474:       hypre_CSRMatrixJ(hoffd)    = NULL;
475:       hypre_CSRMatrixData(hoffd) = NULL;
476:       MatHeaderReplace(A,&T);
477:     }
478:   } else {
479:     oii  = NULL;
480:     ojj  = NULL;
481:     oa   = NULL;
482:     if (reuse == MAT_INITIAL_MATRIX) {
483:       Mat_SeqAIJ* b;
484:       MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);
485:       /* hack SeqAIJ */
486:       b          = (Mat_SeqAIJ*)((*B)->data);
487:       b->free_a  = PETSC_TRUE;
488:       b->free_ij = PETSC_TRUE;
489:     } else if (reuse == MAT_INPLACE_MATRIX) {
490:       Mat T;
491:       MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);
492:       hypre_CSRMatrixI(hdiag)    = NULL;
493:       hypre_CSRMatrixJ(hdiag)    = NULL;
494:       hypre_CSRMatrixData(hdiag) = NULL;
495:       MatHeaderReplace(A,&T);
496:     }
497:   }

499:   /* we have to use hypre_Tfree to free the arrays */
     /* attach the stolen hypre arrays to B via containers whose destroy
        callback (hypre_array_destroy) releases them with hypre's allocator */
500:   if (reuse == MAT_INPLACE_MATRIX) {
501:     void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
502:     const char *names[6] = {"_hypre_csr_dii",
503:                             "_hypre_csr_djj",
504:                             "_hypre_csr_da",
505:                             "_hypre_csr_oii",
506:                             "_hypre_csr_ojj",
507:                             "_hypre_csr_oa"};
508:     for (i=0; i<6; i++) {
509:       PetscContainer c;

511:       PetscContainerCreate(comm,&c);
512:       PetscContainerSetPointer(c,ptrs[i]);
513:       PetscContainerSetUserDestroy(c,hypre_array_destroy);
514:       PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);
515:       PetscContainerDestroy(&c);
516:     }
517:   }
518:   return(0);
519: }

/*
   Wrap an AIJ matrix (Seq or MPI) as a temporary hypre_ParCSRMatrix *without
   copying data*: the hypre CSR blocks alias the PETSc i/j/a arrays directly,
   and hypre is told it does not own them.  The wrapper must be released with
   MatAIJRestoreParCSR_Private(), which detaches the aliases before destroy.
*/
521: static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
522: {
523:   hypre_ParCSRMatrix *tA;
524:   hypre_CSRMatrix    *hdiag,*hoffd;
525:   Mat_SeqAIJ         *diag,*offd;
526:   PetscInt           *garray,noffd,dnnz,onnz,*row_starts,*col_starts;
527:   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
528:   PetscBool          ismpiaij,isseqaij;
529:   PetscErrorCode     ierr;

532:   PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
533:   PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
534:   if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type);
535:   if (ismpiaij) {
536:     Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);

538:     diag   = (Mat_SeqAIJ*)a->A->data;
539:     offd   = (Mat_SeqAIJ*)a->B->data;
540:     garray = a->garray;
541:     noffd  = a->B->cmap->N;
542:     dnnz   = diag->nz;
543:     onnz   = offd->nz;
544:   } else {
     /* sequential: single diagonal block, no off-diagonal part */
545:     diag   = (Mat_SeqAIJ*)A->data;
546:     offd   = NULL;
547:     garray = NULL;
548:     noffd  = 0;
549:     dnnz   = diag->nz;
550:     onnz   = 0;
551:   }

553:   /* create a temporary ParCSR */
     /* with hypre's assumed partition only this rank's entry of the range
        array is passed; otherwise the full range array is used */
554:   if (HYPRE_AssumedPartitionCheck()) {
555:     PetscMPIInt myid;

557:     MPI_Comm_rank(comm,&myid);
558:     row_starts = A->rmap->range + myid;
559:     col_starts = A->cmap->range + myid;
560:   } else {
561:     row_starts = A->rmap->range;
562:     col_starts = A->cmap->range;
563:   }
564:   tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_Int*)row_starts,(HYPRE_Int*)col_starts,noffd,dnnz,onnz);
     /* the ranges belong to the PETSc layouts; hypre must not free them */
565:   hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
566:   hypre_ParCSRMatrixSetColStartsOwner(tA,0);

568:   /* set diagonal part */
569:   hdiag = hypre_ParCSRMatrixDiag(tA);
570:   hypre_CSRMatrixI(hdiag)           = (HYPRE_Int*)diag->i;
571:   hypre_CSRMatrixJ(hdiag)           = (HYPRE_Int*)diag->j;
572:   hypre_CSRMatrixData(hdiag)        = diag->a;
573:   hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
574:   hypre_CSRMatrixSetRownnz(hdiag);
575:   hypre_CSRMatrixSetDataOwner(hdiag,0);

577:   /* set offdiagonal part */
578:   hoffd = hypre_ParCSRMatrixOffd(tA);
579:   if (offd) {
580:     hypre_CSRMatrixI(hoffd)           = (HYPRE_Int*)offd->i;
581:     hypre_CSRMatrixJ(hoffd)           = (HYPRE_Int*)offd->j;
582:     hypre_CSRMatrixData(hoffd)        = offd->a;
583:     hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
584:     hypre_CSRMatrixSetRownnz(hoffd);
585:     hypre_CSRMatrixSetDataOwner(hoffd,0);
586:     hypre_ParCSRMatrixSetNumNonzeros(tA);
587:     hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_Int*)garray;
588:   }
589:   *hA = tA;
590:   return(0);
591: }

593: static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
594: {
595:   hypre_CSRMatrix    *hdiag,*hoffd;

598:   hdiag = hypre_ParCSRMatrixDiag(*hA);
599:   hoffd = hypre_ParCSRMatrixOffd(*hA);
600:   /* set pointers to NULL before destroying tA */
601:   hypre_CSRMatrixI(hdiag)           = NULL;
602:   hypre_CSRMatrixJ(hdiag)           = NULL;
603:   hypre_CSRMatrixData(hdiag)        = NULL;
604:   hypre_CSRMatrixI(hoffd)           = NULL;
605:   hypre_CSRMatrixJ(hoffd)           = NULL;
606:   hypre_CSRMatrixData(hoffd)        = NULL;
607:   hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
608:   hypre_ParCSRMatrixDestroy(*hA);
609:   *hA = NULL;
610:   return(0);
611: }

613: /* calls RAP from BoomerAMG:
614:    the resulting ParCSR will not own the column and row starts
615:    It looks like we don't need to have the diagonal entries
616:    ordered first in the rows of the diagonal part
617:    for boomerAMGBuildCoarseOperator to work */
/* Compute *hRAP = R * A * P via hypre_BoomerAMGBuildCoarseOperator, saving
   and restoring the row/col-starts ownership flags that the hypre routine
   transfers from R and P to the product. */
618: static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
619: {
621:   HYPRE_Int      P_owns_col_starts,R_owns_row_starts;

     /* remember the ownership flags before hypre modifies them */
624:   P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
625:   R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
626:   PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
627:   PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
628:   /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
629:   hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
630:   hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
631:   if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
632:   if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
633:   return(0);
634: }

636: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
637: {
638:   Mat                B;
639:   hypre_ParCSRMatrix *hA,*hP,*hPtAP;
640:   PetscErrorCode     ierr;

643:   MatAIJGetParCSR_Private(A,&hA);
644:   MatAIJGetParCSR_Private(P,&hP);
645:   MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);
646:   MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);
647:   MatHeaderMerge(C,&B);
648:   MatAIJRestoreParCSR_Private(A,&hA);
649:   MatAIJRestoreParCSR_Private(P,&hP);
650:   return(0);
651: }

/* Symbolic phase for hypre-based PtAP of AIJ matrices: no symbolic work is
   done (fill is unused); just create C as MATAIJ and install the hypre-based
   numeric routine. */
653: PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
654: {

658:   MatCreate(PetscObjectComm((PetscObject)A),C);
659:   MatSetType(*C,MATAIJ);
660:   (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
661:   return(0);
662: }

/*
   Numeric phase of C = P^T * A * P where A is AIJ and P is MATHYPRE.
   A is temporarily wrapped as a ParCSR; P's ParCSR is used directly.
   The product is converted to C's existing type and merged into C.
*/
664: static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
665: {
666:   Mat                B;
667:   Mat_HYPRE          *hP;
668:   hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
669:   HYPRE_Int          type;
670:   MPI_Comm           comm = PetscObjectComm((PetscObject)A);
671:   PetscBool          ishypre;
672:   PetscErrorCode     ierr;

675:   PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
676:   if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
677:   hP = (Mat_HYPRE*)P->data;
678:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
679:   if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
680:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));

682:   MatAIJGetParCSR_Private(A,&hA);
683:   MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);
684:   MatAIJRestoreParCSR_Private(A,&hA);

686:   /* create temporary matrix and merge to C */
687:   MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);
688:   MatHeaderMerge(C,&B);
689:   return(0);
690: }

/*
   PtAP driver for AIJ A and MATHYPRE P.  On MAT_INITIAL_MATRIX the output
   type is chosen via the -matptap_hypre_outtype option (default MATAIJ) and
   the numeric routine is installed; the numeric phase is then invoked for
   both initial and reuse calls.
*/
692: static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
693: {

697:   if (scall == MAT_INITIAL_MATRIX) {
698:     const char *deft = MATAIJ;
699:     char       type[256];
700:     PetscBool  flg;

     /* let the user pick the result type from the options database */
702:     PetscObjectOptionsBegin((PetscObject)A);
703:     PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);
704:     PetscOptionsEnd();
705:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
706:     MatCreate(PetscObjectComm((PetscObject)A),C);
707:     if (flg) {
708:       MatSetType(*C,type);
709:     } else {
710:       MatSetType(*C,deft);
711:     }
712:     (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
713:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
714:   }
715:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
716:   (*(*C)->ops->ptapnumeric)(A,P,*C);
717:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
718:   return(0);
719: }

/*
   Numeric phase of C = P^T * A * P when both A and P are MATHYPRE with
   HYPRE_PARCSR storage.  The ParCSR objects are used directly and the
   product is merged into C as MATHYPRE.
*/
721: static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
722: {
723:   Mat                B;
724:   hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
725:   Mat_HYPRE          *hA,*hP;
726:   PetscBool          ishypre;
727:   HYPRE_Int          type;
728:   PetscErrorCode     ierr;

731:   PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
732:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
733:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
734:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
735:   hA = (Mat_HYPRE*)A->data;
736:   hP = (Mat_HYPRE*)P->data;
737:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
738:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
739:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
740:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
741:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
742:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
743:   MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);
744:   MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);
745:   MatHeaderMerge(C,&B);
746:   return(0);
747: }

/*
   PtAP driver for MATHYPRE A and P: on MAT_INITIAL_MATRIX create C as
   MATHYPRE and install the numeric routine, then run the numeric phase.
*/
749: static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
750: {

754:   if (scall == MAT_INITIAL_MATRIX) {
755:     PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
756:     MatCreate(PetscObjectComm((PetscObject)A),C);
757:     MatSetType(*C,MATHYPRE);
758:     (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
759:     PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
760:   }
761:   PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
762:   (*(*C)->ops->ptapnumeric)(A,P,*C);
763:   PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
764:   return(0);
765: }

767: /* calls hypre_ParMatmul
768:    hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
769:    hypre_ParMatrixCreate does not duplicate the communicator
770:    It looks like we don't need to have the diagonal entries
771:    ordered first in the rows of the diagonal part
772:    for boomerAMGBuildCoarseOperator to work */
/* Compute *hAB = hA * hB; the callers in this file take ownership of the
   result (they pass it on with PETSC_OWN_POINTER). */
773: static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
774: {
776:   PetscStackPush("hypre_ParMatmul");
777:   *hAB = hypre_ParMatmul(hA,hB);
778:   PetscStackPop;
779:   return(0);
780: }

782: static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
783: {
784:   Mat                D;
785:   hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
786:   PetscErrorCode     ierr;

789:   MatAIJGetParCSR_Private(A,&hA);
790:   MatAIJGetParCSR_Private(B,&hB);
791:   MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);
792:   MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);
793:   MatHeaderMerge(C,&D);
794:   MatAIJRestoreParCSR_Private(A,&hA);
795:   MatAIJRestoreParCSR_Private(B,&hB);
796:   return(0);
797: }

/* Symbolic phase of C = A*B via hypre: only creates the AIJ shell and
   installs the numeric kernel; fill is unused (hypre allocates itself). */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
{

  MatCreate(PetscObjectComm((PetscObject)A),C);
  MatSetType(*C,MATAIJ);
  (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  return(0);
}

810: static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
811: {
812:   Mat                D;
813:   hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
814:   Mat_HYPRE          *hA,*hB;
815:   PetscBool          ishypre;
816:   HYPRE_Int          type;
817:   PetscErrorCode     ierr;

820:   PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);
821:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
822:   PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
823:   if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
824:   hA = (Mat_HYPRE*)A->data;
825:   hB = (Mat_HYPRE*)B->data;
826:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
827:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
828:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
829:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
830:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
831:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
832:   MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);
833:   MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);
834:   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
835:   MatHeaderReplace(C,&D);
836:   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
837:   return(0);
838: }

/*
   MatMatMult driver for MATHYPRE operands; mirrors MatPtAP_HYPRE_HYPRE:
   the symbolic phase only creates the result shell, and the product is
   always computed by invoking the numeric op.  fill is unused.
*/
static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{

  if (scall == MAT_INITIAL_MATRIX) {
    PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
    MatCreate(PetscObjectComm((PetscObject)A),C);
    MatSetType(*C,MATHYPRE);
    (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
    PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
  }
  PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
  (*(*C)->ops->matmultnumeric)(A,B,*C);
  PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
  return(0);
}

858: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
859: {
860:   Mat                E;
861:   hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
862:   PetscErrorCode     ierr;

865:   MatAIJGetParCSR_Private(A,&hA);
866:   MatAIJGetParCSR_Private(B,&hB);
867:   MatAIJGetParCSR_Private(C,&hC);
868:   MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);
869:   MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);
870:   MatHeaderMerge(D,&E);
871:   MatAIJRestoreParCSR_Private(A,&hA);
872:   MatAIJRestoreParCSR_Private(B,&hB);
873:   MatAIJRestoreParCSR_Private(C,&hC);
874:   return(0);
875: }

/* Symbolic phase of D = A^T*B*C via hypre: only creates the AIJ shell;
   all work happens in MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE.
   fill is unused. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
{

  MatCreate(PetscObjectComm((PetscObject)A),D);
  MatSetType(*D,MATAIJ);
  return(0);
}

/* y = A^T x, delegated to the shared kernel with trans = PETSC_TRUE */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{

  MatHYPRE_MultKernel_Private(A,x,y,PETSC_TRUE);
  return(0);
}

/* y = A x, delegated to the shared kernel with trans = PETSC_FALSE */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{

  MatHYPRE_MultKernel_Private(A,x,y,PETSC_FALSE);
  return(0);
}

/*
   Shared kernel for MatMult/MatMultTranspose: computes y = A x
   (trans == PETSC_FALSE) or y = A^T x (trans == PETSC_TRUE) using hypre's
   ParCSR matvec.  To avoid copies, the data arrays of the PETSc vectors x
   and y are temporarily swapped into the persistent hypre work vectors
   hA->x (column-sized) and hA->b (row-sized); the hypre-owned arrays are
   saved in sax/say and restored before returning.
*/
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, Vec x, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx,*hy;
  PetscScalar        *ax,*ay,*sax,*say;
  PetscErrorCode     ierr;

  PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
  PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
  VecGetArrayRead(x,(const PetscScalar**)&ax);
  VecGetArray(y,&ay);
  if (trans) {
    /* transpose: the input array backs hA->b (row space) and the output
       array backs hA->x (column space) */
    VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
    VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
    hypre_ParCSRMatrixMatvecT(1.,parcsr,hy,0.,hx);
    /* restore the original hypre-owned arrays */
    VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
    VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
  } else {
    /* forward: input backs hA->x (column space), output backs hA->b (row space) */
    VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
    VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
    hypre_ParCSRMatrixMatvec(1.,parcsr,hx,0.,hy);
    VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
    VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
  }
  VecRestoreArrayRead(x,(const PetscScalar**)&ax);
  VecRestoreArray(y,&ay);
  return(0);
}

/* Destructor for MATHYPRE: releases the hypre work vectors, the IJ matrix,
   the duplicated communicator, and clears all composed methods. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE      *hA = (Mat_HYPRE*)A->data;

  if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
  if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
  if (hA->ij) {
    /* when the inner ParCSR object is not owned by us (inner_free false),
       detach it so HYPRE_IJMatrixDestroy does not free someone else's data */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
  }
  if (hA->comm) { MPI_Comm_free(&hA->comm); } /* dup'ed in MatCreate_HYPRE */
  /* remove the methods composed at creation time */
  PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);
  PetscFree(A->data);
  return(0);
}

/* Default MatSetUp: preallocate with default per-row nonzero counts
   (PETSC_DEFAULT is resolved inside MatHYPRESetPreallocation_HYPRE) */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{

  MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);
  return(0);
}

969: static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
970: {
971:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
972:   Vec                x,b;
973:   PetscErrorCode     ierr;

976:   if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
977:   PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
978:   if (hA->x) return(0);
979:   PetscLayoutSetUp(A->rmap);
980:   PetscLayoutSetUp(A->cmap);
981:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);
982:   VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);
983:   VecHYPRE_IJVectorCreate(x,&hA->x);
984:   VecHYPRE_IJVectorCreate(b,&hA->b);
985:   VecDestroy(&x);
986:   VecDestroy(&b);
987:   return(0);
988: }

990: #define MATHYPRE_SCRATCH 2048

992: PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
993: {
994:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
995:   PetscScalar        *vals = (PetscScalar *)v;
996:   PetscScalar        sscr[MATHYPRE_SCRATCH];
997:   HYPRE_Int          cscr[2][MATHYPRE_SCRATCH];
998:   HYPRE_Int          i,nzc;
999:   PetscErrorCode     ierr;

1002:   for (i=0,nzc=0;i<nc;i++) {
1003:     if (cols[i] >= 0) {
1004:       cscr[0][nzc  ] = cols[i];
1005:       cscr[1][nzc++] = i;
1006:     }
1007:   }
1008:   if (!nzc) return(0);

1010:   if (ins == ADD_VALUES) {
1011:     for (i=0;i<nr;i++) {
1012:       if (rows[i] >= 0 && nzc) {
1013:         PetscInt j;
1014:         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1015:         PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1016:       }
1017:       vals += nc;
1018:     }
1019:   } else { /* INSERT_VALUES */
1020: #if defined(PETSC_USE_DEBUG)
1021:     /* Insert values cannot be used to insert offproc entries */
1022:     PetscInt rst,ren;
1023:     MatGetOwnershipRange(A,&rst,&ren);
1024:     for (i=0;i<nr;i++)
1025:       if (rows[i] >= 0 && (rows[i] < rst || rows[i] >= ren)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use INSERT_VALUES for off-proc entries with MatHYPRE. Use ADD_VALUES instead");
1026: #endif
1027:     for (i=0;i<nr;i++) {
1028:       if (rows[i] >= 0 && nzc) {
1029:         PetscInt j;
1030:         for (j=0;j<nzc;j++) sscr[j] = vals[cscr[1][j]];
1031:         PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&nzc,(HYPRE_Int*)(rows+i),cscr[0],sscr));
1032:       }
1033:       vals += nc;
1034:     }
1035:   }
1036:   return(0);
1037: }

1039: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1040: {
1041:   Mat_HYPRE          *hA = (Mat_HYPRE*)A->data;
1042:   HYPRE_Int          *hdnnz,*honnz;
1043:   PetscInt           i,rs,re,cs,ce,bs;
1044:   PetscMPIInt        size;
1045:   PetscErrorCode     ierr;

1048:   MatGetBlockSize(A,&bs);
1049:   PetscLayoutSetUp(A->rmap);
1050:   PetscLayoutSetUp(A->cmap);
1051:   rs   = A->rmap->rstart;
1052:   re   = A->rmap->rend;
1053:   cs   = A->cmap->rstart;
1054:   ce   = A->cmap->rend;
1055:   if (!hA->ij) {
1056:     PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1057:     PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1058:   } else {
1059:     HYPRE_Int hrs,hre,hcs,hce;
1060:     PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1061:     if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%d)",hrs,hre+1,rs,re);
1062:     if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%d)",hcs,hce+1,cs,ce);
1063:   }
1064:   PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));

1066:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1067:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;

1069:   if (!dnnz) {
1070:     PetscMalloc1(A->rmap->n,&hdnnz);
1071:     for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1072:   } else {
1073:     hdnnz = (HYPRE_Int*)dnnz;
1074:   }
1075:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1076:   if (size > 1) {
1077:     if (!onnz) {
1078:       PetscMalloc1(A->rmap->n,&honnz);
1079:       for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1080:     } else {
1081:       honnz = (HYPRE_Int*)onnz;
1082:     }
1083:     PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1084:   } else {
1085:     honnz = NULL;
1086:     PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1087:   }
1088:   if (!dnnz) {
1089:     PetscFree(hdnnz);
1090:   }
1091:   if (!onnz && honnz) {
1092:     PetscFree(honnz);
1093:   }
1094:   A->preallocated = PETSC_TRUE;

1096:   /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0 */
1097:   {
1098:     hypre_AuxParCSRMatrix *aux_matrix;
1099:     aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1100:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1101:   }
1102:   return(0);
1103: }

1105: /*@C
1106:    MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1108:    Collective on Mat

1110:    Input Parameters:
1111: +  A - the matrix
1112: .  dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1113:           (same value is used for all local rows)
1114: .  dnnz - array containing the number of nonzeros in the various rows of the
1115:           DIAGONAL portion of the local submatrix (possibly different for each row)
1116:           or NULL (PETSC_NULL_INTEGER in Fortran), if d_nz is used to specify the nonzero structure.
1117:           The size of this array is equal to the number of local rows, i.e 'm'.
1118:           For matrices that will be factored, you must leave room for (and set)
1119:           the diagonal entry even if it is zero.
1120: .  onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1121:           submatrix (same value is used for all local rows).
1122: -  onnz - array containing the number of nonzeros in the various rows of the
1123:           OFF-DIAGONAL portion of the local submatrix (possibly different for
1124:           each row) or NULL (PETSC_NULL_INTEGER in Fortran), if o_nz is used to specify the nonzero
1125:           structure. The size of this array is equal to the number
1126:           of local rows, i.e 'm'.

1128:    Notes: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.

1130:    Level: intermediate

1132: .keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1135: @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{

  /* dispatch to the type-specific implementation composed on A
     (a no-op if A does not compose MatHYPRESetPreallocation_C) */
  PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));
  return(0);
}

1147: /*
1148:    MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix

1150:    Collective

1152:    Input Parameters:
1153: +  vparcsr  - the pointer to the hypre_ParCSRMatrix
1154: .  mtype    - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1155: -  copymode - PETSc copying options

1157:    Output Parameter:
1158: .  A  - the matrix

1160:    Level: intermediate

1162: .seealso: MatHYPRE, PetscCopyMode
1163: */
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *vparcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
{
  Mat                   T;
  Mat_HYPRE             *hA;
  hypre_ParCSRMatrix    *parcsr;
  MPI_Comm              comm;
  PetscInt              rstart,rend,cstart,cend,M,N;
  PetscBool             isseqaij,ismpiaij,isaij,ishyp,isis;
  PetscErrorCode        ierr;

  parcsr = (hypre_ParCSRMatrix *)vparcsr;
  comm   = hypre_ParCSRMatrixComm(parcsr);
  /* decide the requested output type; MATAIJ covers SEQAIJ/MPIAIJ too */
  PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);
  PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);
  PetscStrcmp(mtype,MATAIJ,&isaij);
  PetscStrcmp(mtype,MATHYPRE,&ishyp);
  PetscStrcmp(mtype,MATIS,&isis);
  isaij  = (PetscBool)(isseqaij || ismpiaij || isaij);
  if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
  if (ishyp && copymode == PETSC_COPY_VALUES) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported copymode PETSC_COPY_VALUES");

  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* create PETSc matrix with MatHYPRE */
  MatCreate(comm,&T);
  MatSetSizes(T,rend-rstart+1,cend-cstart+1,M,N);
  MatSetType(T,MATHYPRE);
  hA   = (Mat_HYPRE*)(T->data);

  /* create HYPRE_IJMatrix */
  PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));

  /* set ParCSR object */
  PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
  /* attach the user's ParCSR directly to the IJ matrix (no copy) */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
  PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) {
      MPI_Comm_rank(comm,&myid);
    }
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      /* point hypre at PETSc's layout ranges instead of allocating its own */
      MatGetLayouts(T,NULL,&map);
      PetscLayoutSetUp(map);
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      MatGetLayouts(T,&map,NULL);
      PetscLayoutSetUp(map);
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_Int*)(map->range + myid);
    }
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A   = T;
    MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);
      MatDestroy(&T);
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);
      *A   = T;
    }
  } else if (isis) {
    MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    MatDestroy(&T);
  }
  return(0);
}

1259: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1260: {
1261:   Mat_HYPRE*            hA = (Mat_HYPRE*)A->data;
1262:   HYPRE_Int             type;
1263:   PetscErrorCode        ierr;

1266:   if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1267:   PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1268:   if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1269:   PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1270:   return(0);
1271: }

1273: /*
1274:    MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1276:    Not collective

   Input Parameter:
.  A  - the MATHYPRE object

1281:    Output Parameter:
1282: .  parcsr  - the pointer to the hypre_ParCSRMatrix

1284:    Level: intermediate

1286: .seealso: MatHYPRE, PetscCopyMode
1287: */
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{

  /* dispatch to MatHYPREGetParCSR_HYPRE; errors if the method is not
     composed on A (i.e. A is not a MATHYPRE matrix) */
  PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));
  return(0);
}

1299: static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1300: {
1301:   hypre_ParCSRMatrix *parcsr;
1302:   hypre_CSRMatrix    *ha;
1303:   PetscInt           rst;
1304:   PetscErrorCode     ierr;

1307:   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1308:   MatGetOwnershipRange(A,&rst,NULL);
1309:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1310:   if (missing) *missing = PETSC_FALSE;
1311:   if (dd) *dd = -1;
1312:   ha = hypre_ParCSRMatrixDiag(parcsr);
1313:   if (ha) {
1314:     PetscInt  size,i;
1315:     HYPRE_Int *ii,*jj;

1317:     size = hypre_CSRMatrixNumRows(ha);
1318:     ii   = hypre_CSRMatrixI(ha);
1319:     jj   = hypre_CSRMatrixJ(ha);
1320:     for (i = 0; i < size; i++) {
1321:       PetscInt  j;
1322:       PetscBool found = PETSC_FALSE;

1324:       for (j = ii[i]; j < ii[i+1] && !found; j++)
1325:         found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

1327:       if (!found) {
1328:         PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1329:         if (missing) *missing = PETSC_TRUE;
1330:         if (dd) *dd = i+rst;
1331:         return(0);
1332:       }
1333:     }
1334:     if (!size) {
1335:       PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1336:       if (missing) *missing = PETSC_TRUE;
1337:       if (dd) *dd = rst;
1338:     }
1339:   } else {
1340:     PetscInfo(A,"Matrix has no diagonal entries therefore is missing diagonal\n");
1341:     if (missing) *missing = PETSC_TRUE;
1342:     if (dd) *dd = rst;
1343:   }
1344:   return(0);
1345: }

1347: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1348: {
1349:   hypre_ParCSRMatrix *parcsr;
1350:   hypre_CSRMatrix    *ha;
1351:   PetscErrorCode     ierr;

1354:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1355:   /* diagonal part */
1356:   ha = hypre_ParCSRMatrixDiag(parcsr);
1357:   if (ha) {
1358:     PetscInt    size,i;
1359:     HYPRE_Int   *ii;
1360:     PetscScalar *a;

1362:     size = hypre_CSRMatrixNumRows(ha);
1363:     a    = hypre_CSRMatrixData(ha);
1364:     ii   = hypre_CSRMatrixI(ha);
1365:     for (i = 0; i < ii[size]; i++) a[i] *= s;
1366:   }
1367:   /* offdiagonal part */
1368:   ha = hypre_ParCSRMatrixOffd(parcsr);
1369:   if (ha) {
1370:     PetscInt    size,i;
1371:     HYPRE_Int   *ii;
1372:     PetscScalar *a;

1374:     size = hypre_CSRMatrixNumRows(ha);
1375:     a    = hypre_CSRMatrixData(ha);
1376:     ii   = hypre_CSRMatrixI(ha);
1377:     for (i = 0; i < ii[size]; i++) a[i] *= s;
1378:   }
1379:   return(0);
1380: }

1382: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1383: {
1384:   hypre_ParCSRMatrix *parcsr;
1385:   HYPRE_Int          *lrows;
1386:   PetscInt           rst,ren,i;
1387:   PetscErrorCode     ierr;

1390:   if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1391:   MatHYPREGetParCSR_HYPRE(A,&parcsr);
1392:   PetscMalloc1(numRows,&lrows);
1393:   MatGetOwnershipRange(A,&rst,&ren);
1394:   for (i=0;i<numRows;i++) {
1395:     if (rows[i] < rst || rows[i] >= ren)
1396:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1397:     lrows[i] = rows[i] - rst;
1398:   }
1399:   PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1400:   PetscFree(lrows);
1401:   return(0);
1402: }

1404: /*MC
1405:    MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
1406:           based on the hypre IJ interface.

1408:    Level: intermediate

1410: .seealso: MatCreate()
1411: M*/

/* Constructor for MATHYPRE: allocates the type-specific data, installs the
   ops table, duplicates the communicator for hypre's use, and composes the
   conversion/PtAP/preallocation methods. */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE      *hB;

  PetscNewLog(B,&hB);
  hB->inner_free = PETSC_TRUE; /* by default we own the inner hypre objects */

  B->data       = (void*)hB;
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  /* start from a clean ops table, then fill in what MATHYPRE supports */
  PetscMemzero(B->ops,sizeof(struct _MatOps));
  B->ops->mult            = MatMult_HYPRE;
  B->ops->multtranspose   = MatMultTranspose_HYPRE;
  B->ops->setup           = MatSetUp_HYPRE;
  B->ops->destroy         = MatDestroy_HYPRE;
  B->ops->assemblyend     = MatAssemblyEnd_HYPRE;
  B->ops->ptap            = MatPtAP_HYPRE_HYPRE;
  B->ops->matmult         = MatMatMult_HYPRE_HYPRE;
  B->ops->setvalues       = MatSetValues_HYPRE;
  B->ops->missingdiagonal = MatMissingDiagonal_HYPRE;
  B->ops->scale           = MatScale_HYPRE;
  B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE;

  /* hypre gets its own communicator (freed in MatDestroy_HYPRE) */
  MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);
  PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);
  PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);
  PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);
  return(0);
}

/* Destructor callback for arrays allocated by hypre: must be released with
   hypre's allocator (hypre_TFree), not PetscFree */
static PetscErrorCode hypre_array_destroy(void *ptr)
{
   hypre_TFree(ptr);
   return(0);
}