Actual source code: hmg.c

petsc-3.14.6 2021-03-30
  1: #include <petscdm.h>
  2: #include <petscctable.h>
  3: #include <petsc/private/matimpl.h>
  4: #include <petsc/private/pcmgimpl.h>
  5: #include <petsc/private/pcimpl.h>

  7: typedef struct {
  8:   PC               innerpc;            /* An inner MG PC (PCHYPRE or PCGAMG) used to set up the interpolations and coarse operators */
  9:   char*            innerpctype;        /* PCGAMG or PCHYPRE */
 10:   PetscBool        reuseinterp;        /* Whether to reuse the interpolations */
 11:   PetscBool        subcoarsening;      /* Whether to use a subspace-based coarsening algorithm */
 12:   PetscBool        usematmaij;         /* Whether to use MatMAIJ to save memory */
 13:   PetscInt         component;          /* Which subspace is used for the subspace-based coarsening algorithm? */
 14: } PC_HMG;

 16: PetscErrorCode PCSetFromOptions_HMG(PetscOptionItems*,PC);
 17: PetscErrorCode PCReset_MG(PC);

 19: static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat,Mat *submat,MatReuse reuse,PetscInt component,PetscInt blocksize)
 20: {
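  /* Pull out the diagonal sub-block that couples only the selected component: the strided index
     set created below picks the local rows rstart+component, rstart+component+blocksize, ...,
     and the same index set is reused for the columns */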
 21:   IS             isrow;
 23:   PetscInt       rstart,rend;
 24:   MPI_Comm       comm;

 27:   PetscObjectGetComm((PetscObject)pmat,&comm);
 28:   if (component>=blocksize) SETERRQ2(comm,PETSC_ERR_ARG_INCOMP,"Component %D should be less than the block size %D\n",component,blocksize);
 29:   MatGetOwnershipRange(pmat,&rstart,&rend);
 30:   if ((rend-rstart)%blocksize != 0) SETERRQ3(comm,PETSC_ERR_ARG_INCOMP,"Block size %D is inconsistent with the local row range [%D, %D)\n",blocksize,rstart,rend);
 31:   ISCreateStride(comm,(rend-rstart)/blocksize,rstart+component,blocksize,&isrow);
 32:   MatCreateSubMatrix(pmat,isrow,isrow,reuse,submat);
 33:   ISDestroy(&isrow);
 34:   return(0);
 35: }

 37: static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
 38: {
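  /* Expand a single-component (sub)interpolation to the full block size: every entry
     P_sub(row,col) is replicated to (row*blocksize+i, col*blocksize+i) for i = 0,...,blocksize-1,
     so all components share the same interpolation weights */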
 39:   PetscInt              subrstart,subrend,subrowsize,subcolsize,subcstart,subcend,rowsize,colsize;
 40:   PetscInt              subrow,row,nz,*d_nnz,*o_nnz,i,j,dnz,onz,max_nz,*indices;
 41:   const PetscInt        *idx;
 42:   const PetscScalar     *values;
 43:   PetscErrorCode        ierr;
 44:   MPI_Comm              comm;

 47:   PetscObjectGetComm((PetscObject)subinterp,&comm);
 48:   MatGetOwnershipRange(subinterp,&subrstart,&subrend);
 49:   subrowsize = subrend-subrstart;
 50:   rowsize = subrowsize*blocksize;
 51:   PetscCalloc2(rowsize,&d_nnz,rowsize,&o_nnz);
 52:   MatGetOwnershipRangeColumn(subinterp,&subcstart,&subcend);
 53:   subcolsize = subcend - subcstart;
 54:   colsize    = subcolsize*blocksize;
 55:   max_nz = 0;
 56:   for (subrow=subrstart;subrow<subrend;subrow++) {
 57:     MatGetRow(subinterp,subrow,&nz,&idx,NULL);
 58:     if (max_nz<nz) max_nz = nz;
 59:     dnz = 0; onz = 0;
 60:     for (i=0;i<nz;i++) {
 61:       if (idx[i]>=subcstart && idx[i]<subcend) dnz++;
 62:       else onz++;
 63:     }
 64:     for (i=0;i<blocksize;i++) {
 65:       d_nnz[(subrow-subrstart)*blocksize+i] = dnz;
 66:       o_nnz[(subrow-subrstart)*blocksize+i] = onz;
 67:     }
 68:     MatRestoreRow(subinterp,subrow,&nz,&idx,NULL);
 69:   }
 70:   MatCreateAIJ(comm,rowsize,colsize,PETSC_DETERMINE,PETSC_DETERMINE,0,d_nnz,0,o_nnz,interp);
 71:   MatSetOption(*interp,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);
 72:   MatSetOption(*interp,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);
 73:   MatSetOption(*interp,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);
 74:   MatSetFromOptions(*interp);

 76:   MatSetUp(*interp);
 77:   PetscFree2(d_nnz,o_nnz);
 78:   PetscMalloc1(max_nz,&indices);
 79:   for (subrow=subrstart; subrow<subrend; subrow++) {
 80:     MatGetRow(subinterp,subrow,&nz,&idx,&values);
 81:     for (i=0;i<blocksize;i++) {
 82:       row = subrow*blocksize+i;
 83:       for (j=0;j<nz;j++) {
 84:         indices[j] = idx[j]*blocksize+i;
 85:       }
 86:       MatSetValues(*interp,1,&row,nz,indices,values,INSERT_VALUES);
 87:     }
 88:     MatRestoreRow(subinterp,subrow,&nz,&idx,&values);
 89:   }
 90:   PetscFree(indices);
 91:   MatAssemblyBegin(*interp,MAT_FINAL_ASSEMBLY);
 92:   MatAssemblyEnd(*interp,MAT_FINAL_ASSEMBLY);
 93:   return(0);
 94: }

 96: PetscErrorCode PCSetUp_HMG(PC pc)
 97: {
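  /* Overall flow: build an inner AMG hierarchy (GAMG or hypre BoomerAMG) on the matrix (or on a
     single-component submatrix when subspace coarsening is enabled), harvest its interpolations
     and, for full-space coarsening, its coarse operators, hand them to the underlying PCMG,
     destroy the inner PC, and finally set up PCMG */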
 98:   PetscErrorCode     ierr;
 99:   Mat                PA, submat;
100:   PC_MG              *mg   = (PC_MG*)pc->data;
101:   PC_HMG             *hmg   = (PC_HMG*) mg->innerctx;
102:   MPI_Comm           comm;
103:   PetscInt           level;
104:   PetscInt           num_levels;
105:   Mat                *operators,*interpolations;
106:   PetscInt           blocksize;
107:   const char         *prefix;
108:   PCMGGalerkinType   galerkin;

111:   PetscObjectGetComm((PetscObject)pc,&comm);
112:   if (pc->setupcalled) {
113:     if (hmg->reuseinterp) {
114:       /* If we did not use Galerkin in the last call, or the sparsity pattern has changed,
115:        * we have to build everything from scratch
116:        */
117:       PCMGGetGalerkin(pc,&galerkin);
118:       if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
119:       PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT);
120:       PCSetUp_MG(pc);
121:       return(0);
122:     } else {
123:       PCReset_MG(pc);
124:       pc->setupcalled = PETSC_FALSE;
125:     }
126:   }

128:   /* Create an inner PC (GAMG or HYPRE) */
129:   if (!hmg->innerpc) {
130:     PCCreate(comm,&hmg->innerpc);
131:     /* If users do not set an inner pc type, we need to set a default value */
132:     if (!hmg->innerpctype) {
133:     /* If hypre is available, use hypre, otherwise, use gamg */
134: #if PETSC_HAVE_HYPRE
135:       PetscStrallocpy(PCHYPRE,&(hmg->innerpctype));
136: #else
137:       PetscStrallocpy(PCGAMG,&(hmg->innerpctype));
138: #endif
139:     }
140:     PCSetType(hmg->innerpc,hmg->innerpctype);
141:   }
142:   PCGetOperators(pc,NULL,&PA);
143:   /* Users need to correctly set the block size of the matrix in order to use subspace coarsening */
144:   MatGetBlockSize(PA,&blocksize);
145:   if (blocksize<=1) hmg->subcoarsening = PETSC_FALSE;
146:   /* Extract a submatrix for constructing subinterpolations */
147:   if (hmg->subcoarsening) {
148:     PCHMGExtractSubMatrix_Private(PA,&submat,MAT_INITIAL_MATRIX,hmg->component,blocksize);
149:     PA = submat;
150:   }
151:   PCSetOperators(hmg->innerpc,PA,PA);
152:   if (hmg->subcoarsening) {
153:     MatDestroy(&PA);
154:   }
155:   /* Set up the inner PC correctly. During this step, the matrix will be coarsened */
156:   PCSetUseAmat(hmg->innerpc,PETSC_FALSE);
157:   PetscObjectGetOptionsPrefix((PetscObject)pc,&prefix);
158:   PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc,prefix);
159:   PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc,"hmg_inner_");
160:   PCSetFromOptions(hmg->innerpc);
161:   PCSetUp(hmg->innerpc);

163:   /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
164:   PCGetInterpolations(hmg->innerpc,&num_levels,&interpolations);
165:   /* We can reuse the coarse operators when we do the full space coarsening */
166:   if (!hmg->subcoarsening) {
167:     PCGetCoarseOperators(hmg->innerpc,&num_levels,&operators);
168:   }
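  /* The caller now holds its own references to the interpolations (and coarse operators),
     so the inner PC is no longer needed */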

170:   PCDestroy(&hmg->innerpc);
171:   hmg->innerpc = NULL;
172:   PCMGSetLevels_MG(pc,num_levels,NULL);
173:   /* Set coarse matrices and interpolations to PCMG */
174:   for (level=num_levels-1; level>0; level--) {
175:     Mat P=NULL, pmat=NULL;
176:     Vec b, x,r;
177:     if (hmg->subcoarsening) {
178:       if (hmg->usematmaij) {
179:         MatCreateMAIJ(interpolations[level-1],blocksize,&P);
180:         MatDestroy(&interpolations[level-1]);
181:       } else {
182:         /* Grow interpolation. In the future, we should use MAIJ */
183:         PCHMGExpandInterpolation_Private(interpolations[level-1],&P,blocksize);
184:         MatDestroy(&interpolations[level-1]);
185:       }
186:     } else {
187:       P = interpolations[level-1];
188:     }
189:     MatCreateVecs(P,&b,&r);
190:     PCMGSetInterpolation(pc,level,P);
191:     PCMGSetRestriction(pc,level,P);
192:     MatDestroy(&P);
193:     /* We reuse the matrices when we do not do subspace coarsening */
194:     if ((level-1)>=0 && !hmg->subcoarsening) {
195:       pmat = operators[level-1];
196:       PCMGSetOperators(pc,level-1,pmat,pmat);
197:       MatDestroy(&pmat);
198:     }
199:     PCMGSetRhs(pc,level-1,b);

201:     PCMGSetR(pc,level,r);
202:     VecDestroy(&r);

204:     VecDuplicate(b,&x);
205:     PCMGSetX(pc,level-1,x);
206:     VecDestroy(&x);
207:     VecDestroy(&b);
208:   }
209:   PetscFree(interpolations);
210:   if (!hmg->subcoarsening) {
211:     PetscFree(operators);
212:   }
213:   /* Turn Galerkin off when we already have coarse operators */
214:   PCMGSetGalerkin(pc,hmg->subcoarsening ? PC_MG_GALERKIN_PMAT:PC_MG_GALERKIN_NONE);
215:   PCSetDM(pc,NULL);
216:   PCSetUseAmat(pc,PETSC_FALSE);
217:   PetscObjectOptionsBegin((PetscObject)pc);
218:   PCSetFromOptions_MG(PetscOptionsObject,pc); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
219:   PetscOptionsEnd();
220:   PCSetUp_MG(pc);
221:   return(0);
222: }

224: PetscErrorCode PCDestroy_HMG(PC pc)
225: {
227:   PC_MG          *mg  = (PC_MG*)pc->data;
228:   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;

231:   PCDestroy(&hmg->innerpc);
232:   PetscFree(hmg->innerpctype);
233:   PetscFree(hmg);
234:   PCDestroy_MG(pc);

236:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetReuseInterpolation_C",NULL);
237:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetUseSubspaceCoarsening_C",NULL);
238:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetInnerPCType_C",NULL);
239:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetCoarseningComponent_C",NULL);
240:   return(0);
241: }

243: PetscErrorCode PCView_HMG(PC pc,PetscViewer viewer)
244: {
245:   PC_MG          *mg = (PC_MG*)pc->data;
246:   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;
248:   PetscBool      iascii;

251:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
252:   if (iascii) {
253:     PetscViewerASCIIPrintf(viewer," Reuse interpolation: %s\n",hmg->reuseinterp? "true":"false");
254:     PetscViewerASCIIPrintf(viewer," Use subspace coarsening: %s\n",hmg->subcoarsening? "true":"false");
255:     PetscViewerASCIIPrintf(viewer," Coarsening component: %D \n",hmg->component);
256:     PetscViewerASCIIPrintf(viewer," Use MatMAIJ: %s \n",hmg->usematmaij? "true":"false");
257:     PetscViewerASCIIPrintf(viewer," Inner PC type: %s \n",hmg->innerpctype);
258:   }
259:   PCView_MG(pc,viewer);
260:   return(0);
261: }

263: PetscErrorCode PCSetFromOptions_HMG(PetscOptionItems *PetscOptionsObject,PC pc)
264: {
266:   PC_MG          *mg = (PC_MG*)pc->data;
267:   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;

270:   PetscOptionsHead(PetscOptionsObject,"HMG");
271:   PetscOptionsBool("-pc_hmg_reuse_interpolation","Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)","PCHMGSetReuseInterpolation",hmg->reuseinterp,&hmg->reuseinterp,NULL);
272:   PetscOptionsBool("-pc_hmg_use_subspace_coarsening","Use subspace coarsening to compute the interpolations","PCHMGSetUseSubspaceCoarsening",hmg->subcoarsening,&hmg->subcoarsening,NULL);
273:   PetscOptionsBool("-pc_hmg_use_matmaij","Use MatMAIJ to store the interpolations to save memory","PCHMGUseMatMAIJ",hmg->usematmaij,&hmg->usematmaij,NULL);
274:   PetscOptionsInt("-pc_hmg_coarsening_component","Which component is chosen for the subspace-based coarsening algorithm","PCHMGSetCoarseningComponent",hmg->component,&hmg->component,NULL);
275:   PetscOptionsTail();
276:   return(0);
277: }

279: static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
280: {
281:   PC_MG          *mg  = (PC_MG*)pc->data;
282:   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;

285:   hmg->reuseinterp = reuse;
286:   return(0);
287: }

289: /*@
290:    PCHMGSetReuseInterpolation - Reuse interpolation matrices in HMG

292:    Logically Collective on PC

294:    Input Parameters:
295: +  pc - the HMG context
296: -  reuse - True indicates that HMG will reuse the interpolations

298:    Options Database Keys:
299: .  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it can potentially save compute time.

301:    Level: beginner

303: .keywords: HMG, multigrid, interpolation, reuse, set

305: .seealso: PCHMG
306: @*/
307: PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
308: {

313:   PetscUseMethod(pc,"PCHMGSetReuseInterpolation_C",(PC,PetscBool),(pc,reuse));
314:   return(0);
315: }

317: static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
318: {
319:   PC_MG          *mg  = (PC_MG*)pc->data;
320:   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;

323:   hmg->subcoarsening = subspace;
324:   return(0);
325: }

327: /*@
328:    PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in HMG

330:    Logically Collective on PC

332:    Input Parameters:
333: +  pc - the HMG context
334: -  subspace - True indicates that HMG will use subspace coarsening

336:    Options Database Keys:
337: .  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

339:    Level: beginner

341: .keywords: HMG, multigrid, interpolation, subspace, coarsening

343: .seealso: PCHMG
344: @*/
345: PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
346: {

351:   PetscUseMethod(pc,"PCHMGSetUseSubspaceCoarsening_C",(PC,PetscBool),(pc,subspace));
352:   return(0);
353: }

355: static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
356: {
357:   PC_MG           *mg  = (PC_MG*)pc->data;
358:   PC_HMG          *hmg = (PC_HMG*) mg->innerctx;
359:   PetscErrorCode  ierr;

362:   PetscStrallocpy(type,&(hmg->innerpctype));
363:   return(0);
364: }

366: /*@C
367:    PCHMGSetInnerPCType - Set an inner PC type

369:    Logically Collective on PC

371:    Input Parameters:
372: +  pc - the HMG context
373: -  type - <hypre, gamg> the inner PC type used to coarsen the matrix

375:    Options Database Keys:
376: .  -hmg_inner_pc_type <hypre, gamg> - Which method is used to coarsen the matrix

378:    Level: beginner

380: .keywords: HMG, multigrid, interpolation, coarsening

382: .seealso: PCHMG, PCType
383: @*/
384: PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
385: {
386:   PetscErrorCode  ierr;

390:   PetscUseMethod(pc,"PCHMGSetInnerPCType_C",(PC,PCType),(pc,type));
391:   return(0);
392: }

394: static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
395: {
396:   PC_MG           *mg  = (PC_MG*)pc->data;
397:   PC_HMG          *hmg = (PC_HMG*) mg->innerctx;

400:   hmg->component = component;
401:   return(0);
402: }

404: /*@
405:    PCHMGSetCoarseningComponent - Set which component is used for the subspace-based coarsening algorithm

407:    Logically Collective on PC

409:    Input Parameters:
410: +  pc - the HMG context
411: -  component - which component the PC will coarsen

413:    Options Database Keys:
414: .  -pc_hmg_coarsening_component - Which component is chosen for the subspace-based coarsening algorithm

416:    Level: beginner

418: .keywords: HMG, multigrid, interpolation, coarsening, component

420: .seealso: PCHMG, PCType
421: @*/
422: PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
423: {
424:   PetscErrorCode  ierr;

428:   PetscUseMethod(pc,"PCHMGSetCoarseningComponent_C",(PC,PetscInt),(pc,component));
429:   return(0);
430: }

432: static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
433: {
434:   PC_MG           *mg  = (PC_MG*)pc->data;
435:   PC_HMG          *hmg = (PC_HMG*) mg->innerctx;

438:   hmg->usematmaij = usematmaij;
439:   return(0);
440: }

442: /*@
443:    PCHMGUseMatMAIJ - Set a flag indicating whether to use MatMAIJ for the interpolations to save memory

445:    Logically Collective on PC

447:    Input Parameters:
448: +  pc - the HMG context
449: -  usematmaij - whether to use MatMAIJ for the interpolations. By default, it is true to save memory

451:    Options Database Keys:
452: .  -pc_hmg_use_matmaij <true | false> - Use MatMAIJ to store the interpolations to save memory

454:    Level: beginner

456: .keywords: HMG, multigrid, interpolation, coarsening, MatMAIJ

458: .seealso: PCHMG, PCType
459: @*/
460: PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
461: {
462:   PetscErrorCode  ierr;

466:   PetscUseMethod(pc,"PCHMGUseMatMAIJ_C",(PC,PetscBool),(pc,usematmaij));
467:   return(0);
468: }

470: /*MC
471:    PCHMG - Hybrid of PETSc preconditioners (such as ASM, BJacobi, SOR, etc.) and hypre BoomerAMG, GAMG, or other multilevel methods. BoomerAMG, GAMG,
472:            or another multilevel method is used to coarsen the matrix and generate a sequence of coarse matrices and interpolations. These matrices and
473:            interpolations are used to construct PCMG, and then any available PETSc preconditioner can be chosen as the smoothers and the coarse solver.

475:    Options Database Keys:
476: +  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it can potentially save compute time.
477: .  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
478: .  -hmg_inner_pc_type <hypre, gamg, ...> - Which method is used to coarsen the matrix
479: -  -pc_hmg_use_matmaij <true | false> - Whether or not to use MatMAIJ for multicomponent problems to save memory


482:    Notes:
483:     For multicomponent problems, one can coarsen just the submatrix associated with one particular component. In this way, the preconditioner setup
484:     time is significantly reduced. One typical use case is the neutron transport equations. There are many variables on each mesh vertex due to the
485:     discretization of angle and energy. Each variable, in fact, corresponds to the same PDE but with different material properties.

487:    Level: beginner

489:    Concepts: Hybrid of ASM and MG, Subspace Coarsening

491:     References:
492: .   1. - Fande Kong, Yaqi Wang, Derek R Gaston, Cody J Permann, Andrew E Slaughter, Alexander D Lindsay, Richard C Martineau, A highly parallel multilevel
493:     Newton-Krylov-Schwarz method with subspace-based coarsening and partition-based balancing for the multigroup neutron transport equations on
494:     3D unstructured meshes, arXiv preprint arXiv:1903.03659, 2019

496: .seealso:  PCCreate(), PCSetType(), PCType, PC, PCMG, PCHYPRE, PCHMG, PCGetCoarseOperators(), PCGetInterpolations(), PCHMGSetReuseInterpolation(), PCHMGSetUseSubspaceCoarsening(),
497:            PCHMGSetInnerPCType()

499: M*/
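/*
   A minimal usage sketch, assuming a linear system with matrix A and vectors b, x (not defined
   here) has already been assembled, and that the block size of A has been set (e.g. with
   MatSetBlockSize()) if subspace coarsening is wanted. Error checking is omitted, matching the
   listing above; the options and setter functions are the ones defined in this file.

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCHMG);
     PCHMGSetInnerPCType(pc,PCGAMG);                 (or PCHYPRE when PETSc is built with hypre)
     PCHMGSetUseSubspaceCoarsening(pc,PETSC_TRUE);
     PCHMGSetCoarseningComponent(pc,0);
     PCHMGSetReuseInterpolation(pc,PETSC_TRUE);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);

   The equivalent options database configuration:
     -pc_type hmg -hmg_inner_pc_type gamg -pc_hmg_use_subspace_coarsening
     -pc_hmg_coarsening_component 0 -pc_hmg_reuse_interpolation
*/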
500: PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
501: {
503:   PC_HMG         *hmg;
504:   PC_MG          *mg;

507:   /* If the type was previously mg, we must manually destroy it because the call to PCSetType(pc,PCMG) below will not destroy it */
508:   if (pc->ops->destroy) {
509:     (*pc->ops->destroy)(pc);
510:     pc->data = NULL;
511:   }
512:   PetscFree(((PetscObject)pc)->type_name);

514:   PCSetType(pc,PCMG);
515:   PetscObjectChangeTypeName((PetscObject)pc, PCHMG);
516:   PetscNew(&hmg);

518:   mg                      = (PC_MG*) pc->data;
519:   mg->innerctx            = hmg;
520:   hmg->reuseinterp        = PETSC_FALSE;
521:   hmg->subcoarsening      = PETSC_FALSE;
522:   hmg->usematmaij         = PETSC_TRUE;
523:   hmg->component          = 0;
524:   hmg->innerpc            = NULL;

526:   pc->ops->setfromoptions = PCSetFromOptions_HMG;
527:   pc->ops->view           = PCView_HMG;
528:   pc->ops->destroy        = PCDestroy_HMG;
529:   pc->ops->setup          = PCSetUp_HMG;

531:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetReuseInterpolation_C",PCHMGSetReuseInterpolation_HMG);
532:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetUseSubspaceCoarsening_C",PCHMGSetUseSubspaceCoarsening_HMG);
533:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetInnerPCType_C",PCHMGSetInnerPCType_HMG);
534:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetCoarseningComponent_C",PCHMGSetCoarseningComponent_HMG);
535:   PetscObjectComposeFunction((PetscObject)pc,"PCHMGUseMatMAIJ_C",PCHMGUseMatMAIJ_HMG);
536:   return(0);
537: }