Actual source code: pcis.c
#include <petsc/private/pcisimpl.h>

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  pcis->use_stiffness_scaling = use;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetUseStiffnessScaling - Tells `PCIS` to construct the partition of unity from
  the diagonal entries of the local matrices

  Logically Collective

  Input Parameters:
+ pc  - the preconditioning context
- use - whether to use the matrix diagonal to build the partition of unity
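
  Example Usage:
  A minimal sketch, assuming `pc` has already been set to a `PCIS`-based type such as `PCBDDC`:
.vb
  PCSetType(pc, PCBDDC);
  PCISSetUseStiffnessScaling(pc, PETSC_TRUE);
.ve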

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
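    /* the user may supply the scaling either on all local nodes (size n, restricted to the interface below) or directly on the interface (size n_B) */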
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for `PCIS`.

  Logically Collective

  Input Parameters:
+ pc              - the preconditioning context
- scaling_factors - scaling factors for the subdomain

  Level: intermediate

  Note:
  Intended for cases with jumping coefficients.
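
  Example Usage:
  A minimal sketch, assuming `n_B` holds the number of local interface nodes (a vector of full local size also works; see the size check in the implementation):
.vb
  Vec D;

  VecCreateSeq(PETSC_COMM_SELF, n_B, &D);
  VecSet(D, 2.0);                         /* per-subdomain scaling factors */
  PCISSetSubdomainDiagonalScaling(pc, D);
  VecDestroy(&D);                         /* the PC keeps its own reference */
.ve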

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`, `PCISSetUseStiffnessScaling()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  pcis->scaling_factor = scal;
  if (pcis->D) PetscCall(VecSet(pcis->D, pcis->scaling_factor));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetSubdomainScalingFactor - Set scaling factor for `PCIS`.

  Not Collective

  Input Parameters:
+ pc   - the preconditioning context
- scal - scaling factor for the subdomain

  Level: intermediate

  Note:
  Intended for cases with jumping coefficients.
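
  Example Usage:
  A minimal sketch; the factor is applied uniformly to the subdomain's scaling vector:
.vb
  PCISSetSubdomainScalingFactor(pc, 0.5);
.ve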

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainDiagonalScaling()`, `PCISSetUseStiffnessScaling()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetUp - sets up the `PC_IS` portion of the `PCNN` and `PCBDDC` preconditioner contexts as part of their setup process

  Input Parameters:
+ pc              - the `PC` object, must be of type `PCNN` or `PCBDDC`
. computematrices - extract the blocks `A_II`, `A_BI`, `A_IB` and `A_BB` from the matrix
- computesolvers  - create the `KSP` objects for the local Dirichlet and Neumann problems
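
  Example Usage:
  A hedged sketch of how a `PCIS`-based implementation might call this from its own setup routine (`PCSetUp_MyIS` is illustrative, not part of the API):
.vb
  static PetscErrorCode PCSetUp_MyIS(PC pc)
  {
    PetscFunctionBegin;
    PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_TRUE)); /* extract the blocks and build the local solvers */
    PetscFunctionReturn(PETSC_SUCCESS);
  }
.ve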

  Level: advanced

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS    *pcis = (PC_IS *)(pc->data);
  Mat_IS   *matis;
  MatReuse  reuse;
  PetscBool flg, issbaij;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires preconditioning matrix of type MATIS");
  matis = (Mat_IS *)pc->pmat->data;
  if (pc->useAmat) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt  n_I;
    PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global;
    PetscBT   bt;
    PetscInt  i, j;

    /* get info on mapping */
    PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
    pcis->mapping = matis->rmapping;
    PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
    PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));

    /* Identifying interior and interface nodes, in local numbering */
    PetscCall(PetscBTCreate(pcis->n, &bt));
    for (i = 0; i < pcis->n_neigh; i++)
      for (j = 0; j < pcis->n_shared[i]; j++) PetscCall(PetscBTSet(bt, pcis->shared[i][j]));

    /* Creating local and global index sets for interior and interface nodes. */
    PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
    PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
    for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
      if (!PetscBTLookup(bt, i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));

    /* Creating the index sets */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));

    /* Freeing memory */
    PetscCall(PetscFree(idx_B_local));
    PetscCall(PetscFree(idx_I_local));
    PetscCall(PetscBTDestroy(&bt));

    /* Creating work vectors and arrays */
    PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
    PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
    PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
    PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
    PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
    PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling() */
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecSet(pcis->D, pcis->scaling_factor));
    }

    /* Creating the scatter contexts */
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));

    /* map from boundary to local */
    PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
  }

  {
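    /* normalize a user-provided scaling vector: accept either the full local size n (and restrict it to the interface) or the interface size n_B */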
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
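  /* These blocks feed the local Dirichlet solver (A_II) and the interface Schur complement
     S = A_BB - A_BI * inv(A_II) * A_IB applied by PCISApplySchur() */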
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs, ibs;

    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDestroy(&pcis->A_II));
      PetscCall(MatDestroy(&pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_IB));
      PetscCall(MatDestroy(&pcis->A_BI));
      PetscCall(MatDestroy(&pcis->A_BB));
    }

    PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
    PetscCall(MatGetBlockSize(matis->A, &bs));
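    /* submatrices inherit the matrix block size only when it agrees with the mapping's block size; otherwise fall back to 1 */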
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
    if (amat) {
      Mat_IS *amatis = (Mat_IS *)pc->mat->data;
      PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
    } else {
      PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_II));
      pcis->A_II = pcis->pA_II;
    }
    PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
    PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
    PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
    } else {
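      /* MATSEQSBAIJ stores only the upper triangular part, so convert to MATSEQBAIJ before extracting the off-diagonal blocks */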
      Mat newmat;

      PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
      PetscCall(MatDestroy(&newmat));
    }
    PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
  }

  /* Creating scaling vector D */
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt     i, n;

    if (pcis->A_BB) {
      PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
    } else {
      PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
    }
    PetscCall(VecAbs(pcis->D));
    PetscCall(VecGetLocalSize(pcis->D, &n));
    PetscCall(VecGetArray(pcis->D, &a));
    /* guard against (near-)zero diagonal entries, which would break the pointwise division below */
    for (i = 0; i < n; i++)
      if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
    PetscCall(VecRestoreArray(pcis->D, &a));
  }
  PetscCall(VecSet(pcis->vec1_global, 0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));
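  /* D now holds this subdomain's share of a partition of unity on the interface: each entry has been
     divided by the sum of the corresponding entries over all subdomains sharing that node */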
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
    PetscCall(KSPSetNestLevel(pcis->ksp_D, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
    PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_D));
    /* finish setting up the local Dirichlet solver */
    PetscCall(KSPSetUp(pcis->ksp_D));
    /* Neumann */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
    PetscCall(KSPSetNestLevel(pcis->ksp_N, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
    PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_N));
    {
      PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor, floating_factor;

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
      if (!damp_fixed) fixed_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));

      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, fixed_factor)); /* use the fixed-subdomain damping factor read above, not the floating one */
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      }
    }
    /* finish setting up the local Neumann solver */
    PetscCall(KSPSetUp(pcis->ksp_N));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISReset - Removes all the `PC_IS` parts of the `PC` implementation data structure

  Input Parameter:
. pc - the `PC` object, must be of type `PCNN` or `PCBDDC`

  Level: advanced

.seealso: [](ch_ksp), `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`, `PCISSetSubdomainScalingFactor()`,
          `PCISInitialize()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISReset(PC pc)
{
  PC_IS    *pcis;
  PetscBool correcttype;

  PetscFunctionBegin;
  if (!pc) PetscFunctionReturn(PETSC_SUCCESS); /* check pc before dereferencing it */
  pcis = (PC_IS *)pc->data;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)pc, &correcttype, PCBDDC, PCNN, ""));
  PetscCheck(correcttype, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC must be of type PCNN or PCBDDC");
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  if (pcis->n_neigh > -1) PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &(pcis->n_neigh), &(pcis->neigh), &(pcis->n_shared), &(pcis->shared)));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISInitialize - initializes the `PC_IS` portion of the `PCNN` and `PCBDDC` preconditioner contexts

  Input Parameter:
. pc - the `PC` object, must be of type `PCNN` or `PCBDDC`

  Level: advanced

  Note:
  There is no standalone `PCIS` preconditioner; the `PCIS`-prefixed routines provide functionality needed by `PCNN` and `PCBDDC`.
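
  Example Usage:
  A hedged sketch of the `PCIS` lifecycle inside a preconditioner implementation (the `_MyIS` names are illustrative; the real callers are `PCNN` and `PCBDDC`, whose contexts begin with a `PC_IS` so that `pc->data` can be cast to `PC_IS *`):
.vb
  PCISInitialize(pc);                    /* in PCCreate_MyIS(), after pc->data is allocated */
  PCISSetUp(pc, PETSC_TRUE, PETSC_TRUE); /* in PCSetUp_MyIS() */
  PCISReset(pc);                         /* in PCReset_MyIS() */
.ve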

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISInitialize(PC pc)
{
  PC_IS    *pcis = (PC_IS *)(pc->data);
  PetscBool correcttype;

  PetscFunctionBegin;
  PetscCheck(pcis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC_IS context must be created by caller");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)pc, &correcttype, PCBDDC, PCNN, ""));
  PetscCheck(correcttype, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC must be of type PCNN or PCBDDC");
  pcis->n_neigh          = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISApplySchur - applies the Schur complement arising from the `MATIS` inside the `PCNN` preconditioner

  Input Parameters:
+ pc     - preconditioner context
. v      - vector to which the Schur complement is to be applied (not modified inside this function, unless `vec2_B` is `NULL`)
. vec1_B - location to store the result of the Schur complement applied to `v`
. vec2_B - workspace, or `NULL` (in which case `v` is used as workspace)
. vec1_D - workspace
- vec2_D - workspace
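
  Note:
  With the block ordering produced by `PCISSetUp()`, this computes
.vb
  vec1_B = (A_BB - A_BI * inv(A_II) * A_IB) * v
.ve
  where the action of inv(A_II) is obtained by solving with the local Dirichlet `KSP`.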

  Level: advanced

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`, `PCISApplyInvSchur()`,
          `PCISReset()`, `PCISInitialize()`
@*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  if (!vec2_B) vec2_B = v;

  PetscCall(MatMult(pcis->A_BB, v, vec1_B));
  PetscCall(MatMult(pcis->A_IB, v, vec1_D));
  PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D));
  PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
  PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));
  PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior and interface,
  including ghosts) into an interface vector, when in `SCATTER_FORWARD` mode, or vice versa, when in `SCATTER_REVERSE`
  mode.

  Input Parameters:
+ pc      - preconditioner context
. array_N - [when in `SCATTER_FORWARD` mode] array to be scattered into the vector; otherwise the output array
. imode   - insert mode, `ADD_VALUES` or `INSERT_VALUES`
. smode   - scatter mode, `SCATTER_FORWARD` or `SCATTER_REVERSE`
- v_B     - [when in `SCATTER_REVERSE` mode] vector to be scattered into the array; otherwise the output vector

  Level: advanced

  Note:
  The entries in the array that do not correspond to interface nodes remain unaltered.
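
  Example Usage:
  A minimal sketch, assuming `work_N` is a local array of length `n` (all local nodes) and `v_B` is an interface vector with the layout created by `PCISSetUp()`:
.vb
  PCISScatterArrayNToVecB(pc, work_N, v_B, INSERT_VALUES, SCATTER_FORWARD); /* gather interface entries into v_B */
  PCISScatterArrayNToVecB(pc, work_N, v_B, INSERT_VALUES, SCATTER_REVERSE); /* push them back into work_N */
.ve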

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`,
          `PCISSetSubdomainScalingFactor()`, `PCISApplySchur()`, `PCISApplyInvSchur()`,
          `PCISReset()`, `PCISInitialize()`, `InsertMode`
@*/
PetscErrorCode PCISScatterArrayNToVecB(PC pc, PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode)
{
  PetscInt        i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  PetscCall(VecGetArray(v_B, &array_B));
  PetscCall(ISGetIndices(pcis->is_B_local, &idex));

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i = 0; i < pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else { /* ADD_VALUES */
      for (i = 0; i < pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else { /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else { /* ADD_VALUES */
      for (i = 0; i < pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  PetscCall(ISRestoreIndices(pcis->is_B_local, &idex));
  PetscCall(VecRestoreArray(v_B, &array_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.

  Input Parameters:
+ pc     - preconditioner context
. b      - vector of local interface nodes (including ghosts)
. x      - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur complement to `b`
. vec1_N - vector of local nodes (interior and interface, including ghosts); used as workspace
- vec2_N - vector of local nodes (interior and interface, including ghosts); used as workspace

  Level: advanced

  Note:
  Solves the problem
.vb
  [ A_II  A_IB ] [ . ]   [ 0 ]
  [            ] [   ] = [   ]
  [ A_BI  A_BB ] [ x ]   [ b ]
.ve
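  so that `x` receives the inverse of the Schur complement S = A_BB - A_BI * inv(A_II) * A_IB applied to `b`.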

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISInitialize()`
@*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS *)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the right-hand side and extracting the
    interface part of the solution. The inverse of the Schur complement is applied to
    b and the result is stored in x.
  */
  /* Setting the RHS vec1_N */
  PetscCall(VecSet(vec1_N, 0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &flg, NULL));
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer));

      PetscCall(VecSum(vec1_N, &average));
      average = average / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Subdomain %04d is fixed. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(average)));
      }
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    }
  }
  /* Solving the system for vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
  /* Extracting the local interface vector out of the solution */
  PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}