/* Actual source code: pcis.c (petsc-3.10.0, 2018-09-12) */
#include <../src/ksp/pc/impls/is/pcis.h>

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->use_stiffness_scaling = use;
  return(0);
}

/*@
   PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                                the diagonal of the local matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));
  return(0);
}
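/*
   Usage sketch (illustrative, not from this file): PCIS is the common layer under
   Neumann-Neumann type preconditioners such as PCNN and PCBDDC, so this setter is
   called on a PC of one of those types. Assuming a MATIS matrix A is already
   assembled:

     PC pc;
     PCCreate(PETSC_COMM_WORLD,&pc);
     PCSetType(pc,PCNN);
     PCSetOperators(pc,A,A);                    // A must have type MATIS
     PCISSetUseStiffnessScaling(pc,PETSC_TRUE);
*/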
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  PetscObjectReference((PetscObject)scaling_factors);
  VecDestroy(&pcis->D);
  pcis->D = scaling_factors;
  return(0);
}

/*@
   PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended for use with problems with jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));
  return(0);
}
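/*
   Usage sketch (illustrative): the scaling vector should match the layout of the
   PCIS interface vectors (one entry per local interface node; compare the creation
   of pcis->D in PCISSetUp below). Assuming pc uses PCIS and scal was created with
   that local size and filled with per-node factors:

     PCISSetSubdomainDiagonalScaling(pc,scal);
     VecDestroy(&scal);   // safe: the setter took its own reference above
*/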
static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->scaling_factor = scal;
  return(0);
}

/*@
   PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use with problems with jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));
  return(0);
}
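/*
   Usage sketch (illustrative): with a constant factor, PCISSetUp below ends up
   with D = scaling_factor / (number of subdomains sharing each interface node),
   e.g.

     PCISSetSubdomainScalingFactor(pc,2.0);
*/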
/* -------------------------------------------------------------------------- */
/*
   PCISSetUp -
*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS     *pcis = (PC_IS*)(pc->data);
  Mat_IS    *matis;
  MatReuse  reuse;
  PetscBool flg,issbaij;

  PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann-Neumann requires matrix of type MATIS");
  matis = (Mat_IS*)pc->pmat->data;

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt n_I;
    PetscInt *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscBT  bt;
    PetscInt i,j;

    /* get info on mapping */
    PetscObjectReference((PetscObject)pc->pmat->rmap->mapping);
    ISLocalToGlobalMappingDestroy(&pcis->mapping);
    pcis->mapping = pc->pmat->rmap->mapping;
    ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n);
    ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));

    /* Identifying interior and interface nodes, in local numbering */
    PetscBTCreate(pcis->n,&bt);
    for (i=0;i<pcis->n_neigh;i++)
      for (j=0;j<pcis->n_shared[i];j++) {
        PetscBTSet(bt,pcis->shared[i][j]);
      }

    /* Creating local and global index sets for interior and interface nodes */
    PetscMalloc1(pcis->n,&idx_I_local);
    PetscMalloc1(pcis->n,&idx_B_local);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (!PetscBTLookup(bt,i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I;       /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global);
    ISLocalToGlobalMappingApply(pcis->mapping,n_I,idx_I_local,idx_I_global);

    /* Creating the index sets */
    ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);
    ISCreateGeneral(PetscObjectComm((PetscObject)pc),pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);
    ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);
    ISCreateGeneral(PetscObjectComm((PetscObject)pc),n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);

    /* Freeing memory */
    PetscFree(idx_B_local);
    PetscFree(idx_I_local);
    PetscBTDestroy(&bt);

    /* Creating work vectors and arrays */
    VecDuplicate(matis->x,&pcis->vec1_N);
    VecDuplicate(pcis->vec1_N,&pcis->vec2_N);
    VecCreate(PETSC_COMM_SELF,&pcis->vec1_D);
    VecSetSizes(pcis->vec1_D,pcis->n-pcis->n_B,PETSC_DECIDE);
    VecSetType(pcis->vec1_D,((PetscObject)pcis->vec1_N)->type_name);
    VecDuplicate(pcis->vec1_D,&pcis->vec2_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec3_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec4_D);
    VecCreate(PETSC_COMM_SELF,&pcis->vec1_B);
    VecSetSizes(pcis->vec1_B,pcis->n_B,PETSC_DECIDE);
    VecSetType(pcis->vec1_B,((PetscObject)pcis->vec1_N)->type_name);
    VecDuplicate(pcis->vec1_B,&pcis->vec2_B);
    VecDuplicate(pcis->vec1_B,&pcis->vec3_B);
    MatCreateVecs(pc->pmat,&pcis->vec1_global,0);
    PetscMalloc1(pcis->n,&pcis->work_N);
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      VecDuplicate(pcis->vec1_B,&pcis->D);
      VecSet(pcis->D,pcis->scaling_factor);
    }

    /* Creating the scatter contexts */
    VecScatterCreate(pcis->vec1_N,pcis->is_I_local,pcis->vec1_D,(IS)0,&pcis->N_to_D);
    VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);
    VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);
    VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);

    /* map from boundary to local */
    ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcis->BtoNmap);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
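  /*
     With this splitting, the interface Schur complement applied by PCISApplySchur
     further below is

         S = A_BB - A_BI * inv(A_II) * A_IB
  */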
  if (computematrices) {
    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      MatDestroy(&pcis->A_II);
      MatDestroy(&pcis->A_IB);
      MatDestroy(&pcis->A_BI);
      MatDestroy(&pcis->A_BB);
    }

    MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->A_II);
    MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,reuse,&pcis->A_BB);
    PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
    if (!issbaij) {
      MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);
      MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);
    } else {
      Mat newmat;

      /* MATSEQSBAIJ stores only the upper triangular part, so the unsymmetric
         off-diagonal blocks A_IB and A_BI are extracted from a full BAIJ copy */
      MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat);
      MatCreateSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);
      MatCreateSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);
      MatDestroy(&newmat);
    }
  }

  /* Creating scaling vector D */
  PetscOptionsGetBool(((PetscObject)pc)->options,((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt    i,n;

    if (pcis->A_BB) {
      MatGetDiagonal(pcis->A_BB,pcis->D);
    } else {
      MatGetDiagonal(matis->A,pcis->vec1_N);
      /* D has the interface layout, so the interface scatter N_to_B is used here */
      VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
    }
    VecGetLocalSize(pcis->D,&n);
    VecGetArray(pcis->D,&a);
    for (i=0;i<n;i++) if (PetscAbsScalar(a[i])<PETSC_SMALL) a[i] = 1.0;
    VecRestoreArray(pcis->D,&a);
  }
  VecScatterBegin(pcis->N_to_B,matis->counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(pcis->N_to_B,matis->counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);
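  /*
     vec1_B now holds, for each interface node, the number of subdomains sharing
     that node (scattered from matis->counter); dividing D by it turns the default
     constant scaling into a partition of unity: with scaling_factor == 1, the
     entries of D for a given node summed over all sharing subdomains add up to 1.
  */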
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);
    KSPSetErrorIfNotConverged(pcis->ksp_D,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II);
    KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");
    KSPGetPC(pcis->ksp_D,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_D,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_D);
    /* KSPSetUp only needs the operator sizes here; no vector values are used */
    KSPSetUp(pcis->ksp_D);
    /* Neumann */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);
    KSPSetErrorIfNotConverged(pcis->ksp_N,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_N,matis->A,matis->A);
    KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");
    KSPGetPC(pcis->ksp_N,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_N,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_N);
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;

      PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);
      if (!damp_fixed) fixed_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);

      PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
                          &floating_factor,&set_damping_factor_floating);
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);
      if (!set_damping_factor_floating) floating_factor = 1.e-12;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL);
      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,floating_factor);
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          MatSetNullSpace(matis->A,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,fixed_factor);
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          MatSetNullSpace(matis->A,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      }
    }
    /* KSPSetUp only needs the operator sizes here; no vector values are used */
    KSPSetUp(pcis->ksp_N);
  }
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy -
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  ISDestroy(&pcis->is_B_local);
  ISDestroy(&pcis->is_I_local);
  ISDestroy(&pcis->is_B_global);
  ISDestroy(&pcis->is_I_global);
  MatDestroy(&pcis->A_II);
  MatDestroy(&pcis->A_IB);
  MatDestroy(&pcis->A_BI);
  MatDestroy(&pcis->A_BB);
  VecDestroy(&pcis->D);
  KSPDestroy(&pcis->ksp_N);
  KSPDestroy(&pcis->ksp_D);
  VecDestroy(&pcis->vec1_N);
  VecDestroy(&pcis->vec2_N);
  VecDestroy(&pcis->vec1_D);
  VecDestroy(&pcis->vec2_D);
  VecDestroy(&pcis->vec3_D);
  VecDestroy(&pcis->vec4_D);
  VecDestroy(&pcis->vec1_B);
  VecDestroy(&pcis->vec2_B);
  VecDestroy(&pcis->vec3_B);
  VecDestroy(&pcis->vec1_global);
  VecScatterDestroy(&pcis->global_to_D);
  VecScatterDestroy(&pcis->N_to_B);
  VecScatterDestroy(&pcis->N_to_D);
  VecScatterDestroy(&pcis->global_to_B);
  PetscFree(pcis->work_N);
  if (pcis->n_neigh > -1) {
    ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  }
  ISLocalToGlobalMappingDestroy(&pcis->mapping);
  ISLocalToGlobalMappingDestroy(&pcis->BtoNmap);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISCreate -
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  pcis->n_neigh          = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS);
  return(0);
}
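/*
   A note on the pattern above: the setters are attached with
   PetscObjectComposeFunction() and looked up by name at call time, so the public
   entry points (implemented via PetscTryMethod) are silent no-ops on PC
   implementations that do not compose these functions.
*/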
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur -

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of the Schur complement applied to v
.  vec2_B - garbage (used as work space), or null (and v is used as work space)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)
*/
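/*
   The body below computes vec1_B = S v with the interface Schur complement

       S v = A_BB v - A_BI * inv(A_II) * (A_IB v)

   where the inner solve with A_II is carried out by the Dirichlet solver ksp_D.
*/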
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  if (!vec2_B) vec2_B = v;

  MatMult(pcis->A_BB,v,vec1_B);
  MatMult(pcis->A_IB,v,vec1_D);
  KSPSolve(pcis->ksp_D,vec1_D,vec2_D);
  MatMult(pcis->A_BI,vec2_D,vec2_B);
  VecAXPY(vec1_B,-1.0,vec2_B);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
   including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice versa, when in SCATTER_REVERSE
   mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] vector to be scattered into the array

   Output parameters:
.  array_N - [when in SCATTER_REVERSE mode] array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] vector to receive the scattered array

   Notes:
   The entries in the array that do not correspond to interface nodes remain unaltered.
*/
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt       i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  VecGetArray(v_B,&array_B);
  ISGetIndices(pcis->is_B_local,&idex);

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else { /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else { /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else { /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  ISRestoreIndices(pcis->is_B_local,&idex);
  VecRestoreArray(v_B,&array_B);
  return(0);
}
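/*
   Usage sketch (illustrative): gather the interface part of a full local array
   into an interface vector, assuming the array has pcis->n entries and the
   vector has pcis->n_B entries:

     PCISScatterArrayNToVecB(pcis->work_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD,pc);

   The same call with SCATTER_REVERSE writes the interface entries of vec1_B back
   into work_N, leaving the interior entries untouched.
*/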
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:

       [ A_II  A_IB ] [ . ]   [ 0 ]
       [            ] [   ] = [   ]
       [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. The inverse of the Schur complement is applied to b and
    the result is stored in x.
  */
  /* Setting the RHS vec1_N */
  VecSet(vec1_N,0.0);
  VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-pc_is_check_consistency",&flg,NULL);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);

      VecSum(vec1_N,&average);
      average = average / ((PetscReal)pcis->n);
      PetscViewerASCIIPushSynchronized(viewer);
      if (pcis->pure_neumann) {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      } else {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      }
      PetscViewerFlush(viewer);
      PetscViewerASCIIPopSynchronized(viewer);
    }
  }
  /* Solving the system for vec2_N */
  KSPSolve(pcis->ksp_N,vec1_N,vec2_N);
  /* Extracting the local interface vector out of the solution */
  VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}