Actual source code: pcis.c
petsc-3.6.1 2015-08-06
#include <../src/ksp/pc/impls/is/pcis.h> /*I "petscpc.h" I*/

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->use_stiffness_scaling = use;
  return(0);
}
/*@
   PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                                the diagonal of the local matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));
  return(0);
}
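/*
   A minimal usage sketch (not part of this file; "ksp" is assumed to be a KSP
   created elsewhere). Stiffness scaling can be requested programmatically, or
   at run time via the option -pc_is_use_stiffness_scaling read in PCISSetUp:

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBDDC);
     PCISSetUseStiffnessScaling(pc,PETSC_TRUE);
*/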
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  VecDestroy(&pcis->D);
  PetscObjectReference((PetscObject)scaling_factors);
  pcis->D = scaling_factors;
  return(0);
}
/*@
   PCISSetSubdomainDiagonalScaling - Sets the diagonal scaling vector for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended for use with cases of jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));
  return(0);
}
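/*
   A minimal usage sketch (assuming "diag" is a sequential Vec with one entry
   per local interface degree of freedom, filled with the desired weights;
   "n_interface" is a placeholder for the number of local interface nodes):

     Vec diag;
     VecCreateSeq(PETSC_COMM_SELF,n_interface,&diag);
     (fill diag, e.g. with the local coefficient at each interface node)
     PCISSetSubdomainDiagonalScaling(pc,diag);
     VecDestroy(&diag);  (the PC has taken its own reference)
*/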
static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->scaling_factor = scal;
  return(0);
}
/*@
   PCISSetSubdomainScalingFactor - Sets the scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use with cases of jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));
  return(0);
}
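/*
   A minimal usage sketch: for a subdomain whose (piecewise constant)
   coefficient is rho_i (a placeholder value), one may use

     PCISSetSubdomainScalingFactor(pc,rho_i);

   so that the partition of unity built in PCISSetUp weights each interface
   node by rho_i divided by the sum of the coefficients of all subdomains
   sharing that node (rho-scaling).
*/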
/* -------------------------------------------------------------------------- */
/*
   PCISSetUp - sets up the data common to the PCIS-based preconditioners:
   interior/interface index sets, the submatrices of the local matrix, the
   scatter contexts, the scaling vector D and, if requested, the local
   Dirichlet and Neumann solvers.
*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computesolvers)
{
  PC_IS     *pcis = (PC_IS*)(pc->data);
  Mat_IS    *matis;
  MatReuse  reuse;
  PetscBool flg,issbaij;
  Vec       counter;

  PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann-Neumann requires matrix of type MATIS");
  matis = (Mat_IS*)pc->pmat->data;

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt n_I;
    PetscInt *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscInt *array;
    PetscInt i,j;

    /* get info on mapping */
    PetscObjectReference((PetscObject)matis->mapping);
    ISLocalToGlobalMappingDestroy(&pcis->mapping);
    pcis->mapping = matis->mapping;
    ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n);
    ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));

    /* Identifying interior and interface nodes, in local numbering: a node is
       on the interface if it is shared with at least one neighboring subdomain */
    PetscMalloc1(pcis->n,&array);
    PetscMemzero(array,pcis->n*sizeof(PetscInt));
    for (i=0;i<pcis->n_neigh;i++)
      for (j=0;j<pcis->n_shared[i];j++)
        array[pcis->shared[i][j]] += 1;
    /* Creating local and global index sets for interior and interface nodes */
    PetscMalloc1(pcis->n,&idx_I_local);
    PetscMalloc1(pcis->n,&idx_B_local);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (!array[i]) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }
    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I;       /* reuse the vacant tail of idx_I_local, avoiding an extra allocation */
    idx_I_global = idx_B_local + pcis->n_B; /* likewise for idx_B_local */
    ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global);
    ISLocalToGlobalMappingApply(pcis->mapping,n_I,idx_I_local,idx_I_global);

    /* Creating the index sets */
    ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES,&pcis->is_B_local);
    ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);
    ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES,&pcis->is_I_local);
    ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);

    /* Freeing memory */
    PetscFree(idx_B_local);
    PetscFree(idx_I_local);
    PetscFree(array);
    /* Creating work vectors and arrays */
    VecDuplicate(matis->x,&pcis->vec1_N);
    VecDuplicate(pcis->vec1_N,&pcis->vec2_N);
    VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec2_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec3_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec4_D);
    VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);
    VecDuplicate(pcis->vec1_B,&pcis->vec2_B);
    VecDuplicate(pcis->vec1_B,&pcis->vec3_B);
    MatCreateVecs(pc->pmat,&pcis->vec1_global,0);
    PetscMalloc1(pcis->n,&pcis->work_N);
    /* scaling vector */
    VecDuplicate(pcis->vec1_B,&pcis->D);

    /* Creating the scatter contexts */
    VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);
    VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);
    VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);

    /* map from boundary to local */
    ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcis->BtoNmap);
  }
  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
  reuse = MAT_INITIAL_MATRIX;
  if (pcis->reusesubmatrices && pc->setupcalled) {
    if (pc->flag == SAME_NONZERO_PATTERN) {
      reuse = MAT_REUSE_MATRIX;
    } else {
      reuse = MAT_INITIAL_MATRIX;
    }
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    MatDestroy(&pcis->A_II);
    MatDestroy(&pcis->A_IB);
    MatDestroy(&pcis->A_BI);
    MatDestroy(&pcis->A_BB);
  }

  MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->A_II);
  MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,reuse,&pcis->A_BB);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
  if (!issbaij) {
    MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);
    MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);
  } else {
    /* SBAIJ stores only the upper triangular part, so the off-diagonal blocks
       cannot be extracted directly; convert to BAIJ first */
    Mat newmat;
    MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat);
    MatGetSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);
    MatGetSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);
    MatDestroy(&newmat);
  }
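  /*
     With these blocks, the interface Schur complement the routines below work
     with is, in exact arithmetic,

         S = A_BB - A_BI * inv(A_II) * A_IB

     It is applied matrix-free in PCISApplySchur and inverted implicitly,
     through a local Neumann solve, in PCISApplyInvSchur.
  */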
  /* Creating scaling "matrix" D */
  PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);
  if (!pcis->use_stiffness_scaling) {
    VecSet(pcis->D,pcis->scaling_factor);
  } else {
    MatGetDiagonal(matis->A,pcis->vec1_N);
    VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
  }
  VecCopy(pcis->D,pcis->vec1_B);
  MatCreateVecs(pc->pmat,&counter,0); /* temporary auxiliary vector */
  VecSet(counter,0.0);
  /* accumulate the weights of all subdomains sharing each interface node ... */
  VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);
  /* ... and bring the sums back to each subdomain */
  VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);
  VecDestroy(&counter);
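  /* After the division, each local entry of D holds its own weight divided by
     the sum of the weights of all subdomains sharing that interface node, so
     the subdomain contributions of D add up to exactly 1 across the interface:
     a discrete partition of unity. */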
  /* See historical note 01, at the bottom of this file. */

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);
    KSPSetErrorIfNotConverged(pcis->ksp_D,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II);
    KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");
    KSPGetPC(pcis->ksp_D,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_D,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_D);
    /* set up the solver now, so that later KSPSolve calls incur no extra setup cost */
    KSPSetUp(pcis->ksp_D);
    /* Neumann */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);
    KSPSetErrorIfNotConverged(pcis->ksp_N,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_N,matis->A,matis->A);
    KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");
    KSPGetPC(pcis->ksp_N,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_N,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_N);
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;
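      /* Runtime options (read under the local solver's options prefix)
         controlling damping and null space handling of the local Neumann
         factorization:
           -pc_is_damp_fixed <f>                   damp fixed subdomains by f
           -pc_is_remove_nullspace_fixed           attach a constant null space to fixed subdomains too
           -pc_is_set_damping_factor_floating <f>  damping factor for floating subdomains
           -pc_is_not_damp_floating                do not damp floating subdomains
           -pc_is_not_remove_nullspace_floating    do not attach a null space to floating subdomains */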
      PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);
      if (!damp_fixed) fixed_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);

      PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&floating_factor,&set_damping_factor_floating);
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);
      if (!set_damping_factor_floating) floating_factor = 1.e-12; /* option absent: apply a small default damping */
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL);
      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,floating_factor);
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          /* constant null space of the pure Neumann problem */
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          MatSetNullSpace(matis->A,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,fixed_factor); /* damping factor read from -pc_is_damp_fixed */
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          MatSetNullSpace(matis->A,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      }
    }
    /* set up the solver now, so that later KSPSolve calls incur no extra setup cost */
    KSPSetUp(pcis->ksp_N);
  }
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy - frees all the objects created by PCISCreate and PCISSetUp.
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  ISDestroy(&pcis->is_B_local);
  ISDestroy(&pcis->is_I_local);
  ISDestroy(&pcis->is_B_global);
  ISDestroy(&pcis->is_I_global);
  MatDestroy(&pcis->A_II);
  MatDestroy(&pcis->A_IB);
  MatDestroy(&pcis->A_BI);
  MatDestroy(&pcis->A_BB);
  VecDestroy(&pcis->D);
  KSPDestroy(&pcis->ksp_N);
  KSPDestroy(&pcis->ksp_D);
  VecDestroy(&pcis->vec1_N);
  VecDestroy(&pcis->vec2_N);
  VecDestroy(&pcis->vec1_D);
  VecDestroy(&pcis->vec2_D);
  VecDestroy(&pcis->vec3_D);
  VecDestroy(&pcis->vec4_D);
  VecDestroy(&pcis->vec1_B);
  VecDestroy(&pcis->vec2_B);
  VecDestroy(&pcis->vec3_B);
  VecDestroy(&pcis->vec1_global);
  VecScatterDestroy(&pcis->global_to_D);
  VecScatterDestroy(&pcis->N_to_B);
  VecScatterDestroy(&pcis->global_to_B);
  PetscFree(pcis->work_N);
  if (pcis->n_neigh > -1) {
    ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  }
  ISLocalToGlobalMappingDestroy(&pcis->mapping);
  ISLocalToGlobalMappingDestroy(&pcis->BtoNmap);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISCreate - initializes the PC_IS fields and composes the PCIS setter methods.
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  pcis->is_B_local  = 0;
  pcis->is_I_local  = 0;
  pcis->is_B_global = 0;
  pcis->is_I_global = 0;
  pcis->A_II        = 0;
  pcis->A_IB        = 0;
  pcis->A_BI        = 0;
  pcis->A_BB        = 0;
  pcis->D           = 0;
  pcis->ksp_N       = 0;
  pcis->ksp_D       = 0;
  pcis->vec1_N      = 0;
  pcis->vec2_N      = 0;
  pcis->vec1_D      = 0;
  pcis->vec2_D      = 0;
  pcis->vec3_D      = 0;
  pcis->vec4_D      = 0; /* also reset vec4_D, which PCISSetUp creates and PCISDestroy frees */
  pcis->vec1_B      = 0;
  pcis->vec2_B      = 0;
  pcis->vec3_B      = 0;
  pcis->vec1_global = 0;
  pcis->work_N      = 0;
  pcis->global_to_D = 0;
  pcis->N_to_B      = 0;
  pcis->global_to_B = 0;
  pcis->mapping     = 0;
  pcis->BtoNmap     = 0;
  pcis->n_neigh     = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur - applies the Schur complement of A_II in A to an interface vector.

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of the Schur complement applied to v
.  vec2_B - garbage (used as work space), or null (and v is used as work space)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)
*/
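/*
   In exact arithmetic this computes vec1_B = S v with

       S v = A_BB v - A_BI inv(A_II) (A_IB v),

   the inner solve with A_II being performed by the local Dirichlet solver ksp_D.
*/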
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  if (!vec2_B) vec2_B = v;

  MatMult(pcis->A_BB,v,vec1_B);        /* vec1_B = A_BB v           */
  MatMult(pcis->A_IB,v,vec1_D);        /* vec1_D = A_IB v           */
  KSPSolve(pcis->ksp_D,vec1_D,vec2_D); /* vec2_D = inv(A_II) A_IB v */
  MatMult(pcis->A_BI,vec2_D,vec2_B);   /* vec2_B = A_BI vec2_D      */
  VecAXPY(vec1_B,-1.0,vec2_B);         /* vec1_B = S v              */
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISScatterArrayNToVecB - Scatters interface node values from a full-size array (of all local nodes, interior and
   interface, including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice versa, when in
   SCATTER_REVERSE mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] vector to be scattered into the array

   Output parameters:
.  array_N - [when in SCATTER_REVERSE mode] array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] vector to receive the scattered array

   Notes:
   The entries in the array that do not correspond to interface nodes remain unaltered.
*/
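/*
   For example (with illustrative values), if is_B_local = {2,5}, then
   SCATTER_FORWARD with INSERT_VALUES performs
     v_B[0] = array_N[2];  v_B[1] = array_N[5];
   while SCATTER_REVERSE with ADD_VALUES performs
     array_N[2] += v_B[0];  array_N[5] += v_B[1];
*/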
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt       i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  VecGetArray(v_B,&array_B);
  ISGetIndices(pcis->is_B_local,&idex);

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else { /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else { /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else { /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  ISRestoreIndices(pcis->is_B_local,&idex);
  VecRestoreArray(v_B,&array_B);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:

                       [ A_II  A_IB ] [ . ]   [ 0 ]
                       [            ] [   ] = [   ]
                       [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
*/
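/*
   Equivalently, x = inv(S) b with S = A_BB - A_BI inv(A_II) A_IB: eliminating
   the interior unknowns from the block system above leaves S x = b, so solving
   the full local (Neumann) problem with zero interior right-hand side and
   restricting the solution to the interface applies the inverse of S.
*/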
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the RHS and extracting the
    interface part of the solution. The inverse of the Schur complement is
    applied to b and the result is stored in x.
  */
  /* Setting the RHS vec1_N */
  VecSet(vec1_N,0.0);
  VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL,"-pc_is_check_consistency",&flg,NULL);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);

      VecSum(vec1_N,&average);
      average = average / ((PetscReal)pcis->n);
      PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
      if (pcis->pure_neumann) {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      } else {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      }
      PetscViewerFlush(viewer);
      PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
    }
  }
  /* Solving the system for vec2_N */
  KSPSolve(pcis->ksp_N,vec1_N,vec2_N);
  /* Extracting the local interface vector out of the solution */
  VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}