Actual source code: redundant.c
/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>

typedef struct {
  KSP                ksp;
  PC                 pc;                     /* actual preconditioner used on each processor */
  Vec                xsub, ysub;             /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;             /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat                pmats;                  /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter         scatterin, scatterout;  /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;               /* number of subcommunicators, i.e. number of redundant solves */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;

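/*
   Overview of how the fields above cooperate during PCApply_Redundant() (a summary of the
   code in this file, not additional functionality):

     x (communicator of the PC) --scatterin--> xdup (contiguous parent communicator)
     xdup's local array is placed into xsub (subcommunicator) --KSPSolve--> ysub
     ysub's local array is placed into ydup --scatterout--> y (communicator of the PC)

   Each subcommunicator thus receives a full copy of the right-hand side and returns its
   (identical) solution restricted back to the ranks that own each piece of y.
*/
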
static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    PetscCall(KSPGetPC(red->ksp, &pc));
    PetscCall(PCFactorSetShiftType(pc, shifttype));
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        PetscBool foundpack, issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
         xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
         ydup concatenates all ysub and has empty local arrays because ysub's arrays will be placed into it.
         Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscCall(KSPSetUp(red->ksp));

  /* Detect failure */
  KSPConvergedReason redreason;
  PetscCall(KSPGetConvergedReason(red->ksp, &redreason));
  if (redreason) pc->failedreason = PC_SUBPC_ERROR;
  PetscFunctionReturn(PETSC_SUCCESS);
}

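/*
   Worked example of the scatter construction above (illustrative numbers, not part of the
   library source): take a global Vec of size M = 8 on 4 ranks (mlocal = 2 each) and
   psubcomm->n = 2 subcommunicators. Rank 1 owns rows 2,3 and builds

     idx1 = {2, 3, 2, 3}      indices read from x
     idx2 = {2, 3, 10, 11}    indices written into xdup; the k-th copy is offset by k*M

   so after scatterin every subcommunicator holds all 8 entries of x inside xdup, and
   scatterout pulls each rank's own rows back out of the copy selected by psubcomm->color.
*/
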
static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    PetscCall(VecScatterDestroy(&red->scatterin));
    PetscCall(VecScatterDestroy(&red->scatterout));
    PetscCall(VecDestroy(&red->ysub));
    PetscCall(VecDestroy(&red->xsub));
    PetscCall(VecDestroy(&red->xdup));
    PetscCall(VecDestroy(&red->ydup));
  }
  PetscCall(MatDestroy(&red->pmats));
  PetscCall(KSPReset(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Redundant(pc));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(PetscSubcommDestroy(&red->psubcomm));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

  Logically Collective

  Input Parameters:
+ pc         - the preconditioner context
- nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
               an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}

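/*
   Example usage (an illustrative sketch, not part of this source file): requesting 4
   redundant solves from application code, assuming `ksp` is an already-created KSP.

     PC pc;
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCREDUNDANT));
     PetscCall(PCRedundantSetNumber(pc, 4)); // with 64 MPI processes: 4 solves, 16 processes each

   The same effect is available from the options database with -pc_redundant_number 4.
*/
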
static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)in));
  PetscCall(VecScatterDestroy(&red->scatterin));

  red->scatterin = in;

  PetscCall(PetscObjectReference((PetscObject)out));
  PetscCall(VecScatterDestroy(&red->scatterout));
  red->scatterout = out;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantSetScatter - Sets the scatter used to copy values into the
  redundant local solve and the scatter to move them back into the global
  vector.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - the scatter to move the values in
- out - the scatter to move them out

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new KSP/PC of which the processes in each subcomm have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. innerksp - the `KSP` on the smaller set of processes

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscAssertPointer(innerksp, 2);
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

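/*
   Example usage (an illustrative sketch, not part of this source file): customizing the
   inner solver returned by PCRedundantGetKSP(), assuming `pc` is a PC of type PCREDUNDANT.

     KSP innerksp;
     PetscCall(PCRedundantGetKSP(pc, &innerksp));
     PetscCall(KSPSetType(innerksp, KSPGMRES)); // replace the default KSPPREONLY
     PetscCall(KSPSetTolerances(innerksp, 1.e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));

   Equivalent options-database settings use the inner prefix, e.g. -redundant_ksp_type gmres.
*/
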
static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  if (mat) *mat = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the redundant solve on each subcommunicator

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ mat  - the matrix
- pmat - the (possibly different) preconditioner matrix

  Level: advanced

.seealso: [](ch_ksp), `PCREDUNDANT`
@*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  if (mat) PetscAssertPointer(mat, 2);
  if (pmat) PetscAssertPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

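/*
   Example usage (an illustrative sketch, not part of this source file): inspecting the
   redundant operator after the preconditioner has been set up, assuming `pc` is a set-up
   PCREDUNDANT.

     Mat A_red;
     PetscCall(PCRedundantGetOperators(pc, &A_red, NULL));
     if (A_red) PetscCall(MatView(A_red, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)A_red))));

   A_red is NULL until PCSetUp() has run (and when the parallel matrix is not gathered),
   hence the guard.
*/
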
/*MC
   PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processors

   Options Database Key:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes:
   Options for the redundant preconditioners can be set using the options database prefix -redundant_

   The default `KSP` is preonly and the default `PC` is `PCLU` or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`.

   `PCFactorSetShiftType()` applied to this `PC` will convey the shift type into the inner `PC` if it is factorization based.

   Developer Note:
   `PCSetInitialGuessNonzero()` is not used by this class but likely should be.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
          `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
M*/

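/*
   Example usage (illustrative, command line only): run an application that calls
   KSPSetFromOptions() with a redundant direct solve on 2 subgroups; `./app` here stands
   for any such executable.

     mpiexec -n 8 ./app -ksp_type gmres -pc_type redundant -pc_redundant_number 2 \
         -redundant_ksp_type preonly -redundant_pc_type lu -ksp_view

   The -redundant_ prefix reaches the inner KSP and PC created by this preconditioner.
*/
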
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}