Actual source code: hmg.c
#include <petscdm.h>
#include <petsc/private/hashmapi.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcmgimpl.h>
#include <petsc/private/pcimpl.h>

typedef struct {
  PC        innerpc;       /* An inner MG PC (PCHYPRE or PCGAMG) used to set up the interpolations and coarse operators */
  char     *innerpctype;   /* PCGAMG or PCHYPRE */
  PetscBool reuseinterp;   /* Whether to reuse the interpolations */
  PetscBool subcoarsening; /* Whether to use a subspace-based coarsening algorithm */
  PetscBool usematmaij;    /* Whether to use MatMAIJ to save memory */
  PetscInt  component;     /* Which component is used for the subspace-based coarsening algorithm */
} PC_HMG;

static PetscErrorCode PCSetFromOptions_HMG(PC, PetscOptionItems *);
PetscErrorCode        PCReset_MG(PC);
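
/* Extract the single-component submatrix used to build the subspace interpolations:
   select every blocksize-th row and column of pmat, starting at `component`, via a
   strided index set */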
static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat, Mat *submat, MatReuse reuse, PetscInt component, PetscInt blocksize)
{
  IS       isrow;
  PetscInt rstart, rend;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pmat, &comm));
  PetscCheck(component < blocksize, comm, PETSC_ERR_ARG_INCOMP, "Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ", component, blocksize);
  PetscCall(MatGetOwnershipRange(pmat, &rstart, &rend));
  PetscCheck((rend - rstart) % blocksize == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ", blocksize, rstart, rend);
  PetscCall(ISCreateStride(comm, (rend - rstart) / blocksize, rstart + component, blocksize, &isrow));
  PetscCall(MatCreateSubMatrix(pmat, isrow, isrow, reuse, submat));
  PetscCall(ISDestroy(&isrow));
  PetscFunctionReturn(PETSC_SUCCESS);
}
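
/* Expand a single-component interpolation `subinterp` into one that acts on all
   components: each entry subinterp(r, c) is replicated so that
   interp(r*blocksize + i, c*blocksize + i) = subinterp(r, c) for i = 0, ..., blocksize-1.
   This assembles explicitly what MatCreateMAIJ() represents implicitly */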
static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
{
  PetscInt           subrstart, subrend, subrowsize, subcolsize, subcstart, subcend, rowsize, colsize;
  PetscInt           subrow, row, nz, *d_nnz, *o_nnz, i, j, dnz, onz, max_nz, *indices;
  const PetscInt    *idx;
  const PetscScalar *values;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)subinterp, &comm));
  PetscCall(MatGetOwnershipRange(subinterp, &subrstart, &subrend));
  subrowsize = subrend - subrstart;
  rowsize    = subrowsize * blocksize;
  PetscCall(PetscCalloc2(rowsize, &d_nnz, rowsize, &o_nnz));
  PetscCall(MatGetOwnershipRangeColumn(subinterp, &subcstart, &subcend));
  subcolsize = subcend - subcstart;
  colsize    = subcolsize * blocksize;
  max_nz     = 0;
  /* Count diagonal-block and off-diagonal-block nonzeros per row for preallocation */
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, NULL));
    if (max_nz < nz) max_nz = nz;
    dnz = 0;
    onz = 0;
    for (i = 0; i < nz; i++) {
      if (idx[i] >= subcstart && idx[i] < subcend) dnz++;
      else onz++;
    }
    for (i = 0; i < blocksize; i++) {
      d_nnz[(subrow - subrstart) * blocksize + i] = dnz;
      o_nnz[(subrow - subrstart) * blocksize + i] = onz;
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, NULL));
  }
  PetscCall(MatCreateAIJ(comm, rowsize, colsize, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, interp));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(MatSetFromOptions(*interp));

  PetscCall(MatSetUp(*interp));
  PetscCall(PetscFree2(d_nnz, o_nnz));
  PetscCall(PetscMalloc1(max_nz, &indices));
  /* Copy each subinterp row into blocksize rows of interp, one per component */
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, &values));
    for (i = 0; i < blocksize; i++) {
      row = subrow * blocksize + i;
      for (j = 0; j < nz; j++) indices[j] = idx[j] * blocksize + i;
      PetscCall(MatSetValues(*interp, 1, &row, nz, indices, values, INSERT_VALUES));
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, &values));
  }
  PetscCall(PetscFree(indices));
  PetscCall(MatAssemblyBegin(*interp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*interp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
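
/* Set up PCHMG: (1) create an inner AMG PC (hypre or GAMG) on either the full matrix
   or a single-component submatrix, (2) pull out its interpolation hierarchy (and, for
   full-space coarsening, its coarse operators), (3) destroy the inner PC and hand the
   hierarchy to PCMG, leaving Galerkin coarsening on only when the coarse operators
   must still be computed for the full system (subspace coarsening) */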
static PetscErrorCode PCSetUp_HMG(PC pc)
{
  Mat              PA, submat;
  PC_MG           *mg  = (PC_MG *)pc->data;
  PC_HMG          *hmg = (PC_HMG *)mg->innerctx;
  MPI_Comm         comm;
  PetscInt         level;
  PetscInt         num_levels;
  Mat             *operators, *interpolations;
  PetscInt         blocksize;
  const char      *prefix;
  PCMGGalerkinType galerkin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (pc->setupcalled) {
    if (hmg->reuseinterp) {
      /* If we did not use Galerkin in the last call, or the sparsity pattern has changed,
         we have to build from scratch */
      PetscCall(PCMGGetGalerkin(pc, &galerkin));
      if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
      PetscCall(PCSetUp_MG(pc));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else {
      PetscCall(PCReset_MG(pc));
      pc->setupcalled = PETSC_FALSE;
    }
  }

  /* Create an inner PC (GAMG or HYPRE) */
  if (!hmg->innerpc) {
    PetscCall(PCCreate(comm, &hmg->innerpc));
    /* If the user did not set an inner PC type, pick a default */
    if (!hmg->innerpctype) {
      /* If hypre is available, use hypre; otherwise, use gamg */
#if PetscDefined(HAVE_HYPRE)
      PetscCall(PetscStrallocpy(PCHYPRE, &(hmg->innerpctype)));
#else
      PetscCall(PetscStrallocpy(PCGAMG, &(hmg->innerpctype)));
#endif
    }
    PetscCall(PCSetType(hmg->innerpc, hmg->innerpctype));
  }
  PetscCall(PCGetOperators(pc, NULL, &PA));
  /* The user must set the matrix block size correctly in order to use subspace coarsening */
  PetscCall(MatGetBlockSize(PA, &blocksize));
  if (blocksize <= 1) hmg->subcoarsening = PETSC_FALSE;
  /* Extract a submatrix for constructing the subinterpolations */
  if (hmg->subcoarsening) {
    PetscCall(PCHMGExtractSubMatrix_Private(PA, &submat, MAT_INITIAL_MATRIX, hmg->component, blocksize));
    PA = submat;
  }
  PetscCall(PCSetOperators(hmg->innerpc, PA, PA));
  if (hmg->subcoarsening) PetscCall(MatDestroy(&PA));
  /* Set up the inner PC; during this step the matrix is coarsened */
  PetscCall(PCSetUseAmat(hmg->innerpc, PETSC_FALSE));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc, prefix));
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc, "hmg_inner_"));
  PetscCall(PCSetFromOptions(hmg->innerpc));
  PetscCall(PCSetUp(hmg->innerpc));

  /* Obtain the interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
  PetscCall(PCGetInterpolations(hmg->innerpc, &num_levels, &interpolations));
  /* We can reuse the coarse operators when we do full-space coarsening */
  if (!hmg->subcoarsening) PetscCall(PCGetCoarseOperators(hmg->innerpc, &num_levels, &operators));

  PetscCall(PCDestroy(&hmg->innerpc));
  hmg->innerpc = NULL;
  PetscCall(PCMGSetLevels_MG(pc, num_levels, NULL));
  /* Set the coarse matrices and interpolations on PCMG */
  for (level = num_levels - 1; level > 0; level--) {
    Mat P = NULL, pmat = NULL;
    Vec b, x, r;
    if (hmg->subcoarsening) {
      if (hmg->usematmaij) {
        PetscCall(MatCreateMAIJ(interpolations[level - 1], blocksize, &P));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      } else {
        /* Grow the interpolation explicitly. In the future, we should use MAIJ */
        PetscCall(PCHMGExpandInterpolation_Private(interpolations[level - 1], &P, blocksize));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      }
    } else {
      P = interpolations[level - 1];
    }
    PetscCall(MatCreateVecs(P, &b, &r));
    PetscCall(PCMGSetInterpolation(pc, level, P));
    PetscCall(PCMGSetRestriction(pc, level, P));
    PetscCall(MatDestroy(&P));
    /* We reuse the coarse matrices when we do not do subspace coarsening */
    if ((level - 1) >= 0 && !hmg->subcoarsening) {
      pmat = operators[level - 1];
      PetscCall(PCMGSetOperators(pc, level - 1, pmat, pmat));
      PetscCall(MatDestroy(&pmat));
    }
    PetscCall(PCMGSetRhs(pc, level - 1, b));

    PetscCall(PCMGSetR(pc, level, r));
    PetscCall(VecDestroy(&r));

    PetscCall(VecDuplicate(b, &x));
    PetscCall(PCMGSetX(pc, level - 1, x));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(PetscFree(interpolations));
  if (!hmg->subcoarsening) PetscCall(PetscFree(operators));
  /* Turn Galerkin off when we already have the coarse operators */
  PetscCall(PCMGSetGalerkin(pc, hmg->subcoarsening ? PC_MG_GALERKIN_PMAT : PC_MG_GALERKIN_NONE));
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUseAmat(pc, PETSC_FALSE));
  PetscObjectOptionsBegin((PetscObject)pc);
  PetscCall(PCSetFromOptions_MG(pc, PetscOptionsObject)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
  PetscOptionsEnd();
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_HMG(PC pc)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PCDestroy(&hmg->innerpc));
  PetscCall(PetscFree(hmg->innerpctype));
  PetscCall(PetscFree(hmg));
  PetscCall(PCDestroy_MG(pc));

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_HMG(PC pc, PetscViewer viewer)
{
  PC_MG    *mg  = (PC_MG *)pc->data;
  PC_HMG   *hmg = (PC_HMG *)mg->innerctx;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Reuse interpolation: %s\n", hmg->reuseinterp ? "true" : "false"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use subspace coarsening: %s\n", hmg->subcoarsening ? "true" : "false"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening component: %" PetscInt_FMT " \n", hmg->component));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use MatMAIJ: %s \n", hmg->usematmaij ? "true" : "false"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Inner PC type: %s \n", hmg->innerpctype));
  }
  PetscCall(PCView_MG(pc, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_HMG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HMG");
  PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation", "Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)", "PCHMGSetReuseInterpolation", hmg->reuseinterp, &hmg->reuseinterp, NULL));
  PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening", "Use subspace coarsening to compute the interpolations", "PCHMGSetUseSubspaceCoarsening", hmg->subcoarsening, &hmg->subcoarsening, NULL));
  PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij", "Use MatMAIJ to store the interpolations to save memory", "PCHMGUseMatMAIJ", hmg->usematmaij, &hmg->usematmaij, NULL));
  PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component", "Which component is chosen for the subspace-based coarsening algorithm", "PCHMGSetCoarseningComponent", hmg->component, &hmg->component, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->reuseinterp = reuse;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetReuseInterpolation - Reuse the interpolation matrices in `PCHMG` after the numerical values of the matrix change

  Logically Collective

  Input Parameters:
+ pc    - the `PCHMG` context
- reuse - `PETSC_TRUE` indicates that `PCHMG` will reuse the interpolations

  Options Database Key:
. -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it can potentially save compute time.

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCGAMG`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
{
  PetscFunctionBegin;

  PetscUseMethod(pc, "PCHMGSetReuseInterpolation_C", (PC, PetscBool), (pc, reuse));
  PetscFunctionReturn(PETSC_SUCCESS);
}
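
/* Example usage of PCHMGSetReuseInterpolation() (a minimal sketch; assumes `pc` is a
   PC of type PCHMG, and that only the numerical values of the matrix, not its sparsity
   pattern, change between solves):

     PetscCall(PCHMGSetReuseInterpolation(pc, PETSC_TRUE));

   or, equivalently, on the command line:

     -pc_hmg_reuse_interpolation true
*/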

static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->subcoarsening = subspace;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in `PCHMG`

  Logically Collective

  Input Parameters:
+ pc       - the `PCHMG` context
- subspace - `PETSC_TRUE` indicates that `PCHMG` will use subspace coarsening

  Options Database Key:
. -pc_hmg_use_subspace_coarsening <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
{
  PetscFunctionBegin;

  PetscUseMethod(pc, "PCHMGSetUseSubspaceCoarsening_C", (PC, PetscBool), (pc, subspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}
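
/* Example usage of subspace coarsening (a minimal sketch; assumes the system matrix `A`
   has had MatSetBlockSize(A, 3) called to declare three components per grid point, and
   `pc` is the PC attached to the solver; components are indexed from 0):

     PetscCall(PCSetType(pc, PCHMG));
     PetscCall(PCHMGSetUseSubspaceCoarsening(pc, PETSC_TRUE));
     PetscCall(PCHMGSetCoarseningComponent(pc, 1)); // coarsen based on the second component
*/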

static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscStrallocpy(type, &(hmg->innerpctype)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCHMGSetInnerPCType - Set the type of the inner `PC`

  Logically Collective

  Input Parameters:
+ pc   - the `PCHMG` context
- type - `PCHYPRE` or `PCGAMG` coarsening algorithm

  Options Database Key:
. -hmg_inner_pc_type <hypre, gamg> - Which method is used to coarsen the matrix

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`
@*/
PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
{
  PetscFunctionBegin;

  PetscUseMethod(pc, "PCHMGSetInnerPCType_C", (PC, PCType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
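
/* Example usage of PCHMGSetInnerPCType() (a minimal sketch; forces PCGAMG as the inner
   coarsening method even when PETSc was configured with hypre):

     PetscCall(PCHMGSetInnerPCType(pc, PCGAMG));

   or, equivalently, on the command line:

     -hmg_inner_pc_type gamg
*/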

static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->component = component;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetCoarseningComponent - Set which component of the PDE is used for the subspace-based coarsening algorithm

  Logically Collective

  Input Parameters:
+ pc        - the `PCHMG` context
- component - which component `PC` will coarsen

  Options Database Key:
. -pc_hmg_coarsening_component <i> - Which component is chosen for the subspace-based coarsening algorithm

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
{
  PetscFunctionBegin;

  PetscUseMethod(pc, "PCHMGSetCoarseningComponent_C", (PC, PetscInt), (pc, component));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->usematmaij = usematmaij;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGUseMatMAIJ - Set a flag that indicates whether to use `MATMAIJ` for the interpolation matrices to save memory

  Logically Collective

  Input Parameters:
+ pc         - the `PCHMG` context
- usematmaij - `PETSC_TRUE` (default) to use `MATMAIJ` for the interpolations

  Options Database Key:
. -pc_hmg_use_matmaij <true | false> - Whether or not to use `MATMAIJ` for the interpolations

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`
@*/
PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
{
  PetscFunctionBegin;

  PetscUseMethod(pc, "PCHMGUseMatMAIJ_C", (PC, PetscBool), (pc, usematmaij));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCHMG - For PDE problems with multiple components, constructs a hierarchy of restriction operators to coarse-grid problems using the submatrix of
   a single component with either `PCHYPRE` or `PCGAMG`. The same restriction operators are used for each of the components of the PDE with `PCMG`,
   resulting in a preconditioner that is much more efficient to build and apply than `PCGAMG` on the entire system {cite}`kong2020highly`.

   Options Database Keys:
+  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations for new matrix values. It can potentially save compute time.
.  -pc_hmg_use_subspace_coarsening <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
.  -hmg_inner_pc_type <hypre, gamg, ...> - Which method is used to coarsen the matrix
-  -pc_hmg_use_matmaij <true | false> - Whether or not to use `MATMAIJ` for multicomponent problems to save memory

   Level: intermediate

   Note:
   `MatSetBlockSize()` must be called on the linear system matrix to set the number of components of the PDE.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`,
          `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
M*/
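
/* Example usage of PCHMG within a KSP solve (a minimal sketch; `A`, `b`, `x`, and `ksp`
   are assumed to have been created and assembled by the caller for a PDE with three
   components per grid point):

     PetscCall(MatSetBlockSize(A, 3)); // required for subspace coarsening
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCHMG));
     PetscCall(PCHMGSetUseSubspaceCoarsening(pc, PETSC_TRUE));
     PetscCall(KSPSolve(ksp, b, x));

   Command-line equivalent:

     -pc_type hmg -pc_hmg_use_subspace_coarsening true -hmg_inner_pc_type gamg
*/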

PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
{
  PC_HMG *hmg;
  PC_MG  *mg;

  PetscFunctionBegin;
  /* If the type was previously PCMG, we must destroy it manually because the call to PCSetType(pc, PCMG) below will not */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;
  PetscCall(PetscFree(((PetscObject)pc)->type_name));

  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
  PetscCall(PetscNew(&hmg));

  mg                 = (PC_MG *)pc->data;
  mg->innerctx       = hmg;
  hmg->reuseinterp   = PETSC_FALSE;
  hmg->subcoarsening = PETSC_FALSE;
  hmg->usematmaij    = PETSC_TRUE;
  hmg->component     = 0;
  hmg->innerpc       = NULL;

  pc->ops->setfromoptions = PCSetFromOptions_HMG;
  pc->ops->view           = PCView_HMG;
  pc->ops->destroy        = PCDestroy_HMG;
  pc->ops->setup          = PCSetUp_HMG;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", PCHMGSetReuseInterpolation_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", PCHMGSetUseSubspaceCoarsening_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", PCHMGSetInnerPCType_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", PCHMGSetCoarseningComponent_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", PCHMGUseMatMAIJ_HMG));
  PetscFunctionReturn(PETSC_SUCCESS);
}