/* pcis.c - PETSc 3.5.4 (2015-05-23) */
#include <../src/ksp/pc/impls/is/pcis.h> /*I "petscpc.h" I*/

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->use_stiffness_scaling = use;
  return(0);
}
/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                              the diagonal of the local matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));
  return(0);
}
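/*
   Usage sketch (illustrative only, not part of this file): on a PC whose
   implementation is built on PCIS (e.g. PCBDDC), stiffness scaling can be
   requested from code, or via the (possibly prefixed) run-time option
   -pc_is_use_stiffness_scaling read in PCISSetUp() below:

       PCISSetUseStiffnessScaling(pc,PETSC_TRUE);
*/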
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  VecDestroy(&pcis->D);
  PetscObjectReference((PetscObject)scaling_factors);
  pcis->D = scaling_factors;
  return(0);
}
/*@
 PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended for use in cases with jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));
  return(0);
}
static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->scaling_factor = scal;
  return(0);
}
/*@
 PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use in cases with jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));
  return(0);
}
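/*
   Usage sketch (illustrative only; names and sizes below are hypothetical).
   A single factor per subdomain is the default route: it fills the scaling
   vector D in PCISSetUp() when neither a user vector nor stiffness scaling
   is requested. Alternatively, a per-interface-node Vec may be supplied:

       PCISSetSubdomainScalingFactor(pc,2.0);

       Vec D;
       VecCreateSeq(PETSC_COMM_SELF,n_interface_nodes,&D);
       VecSet(D,2.0);
       PCISSetSubdomainDiagonalScaling(pc,D);
       VecDestroy(&D);
*/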
/* -------------------------------------------------------------------------- */
/*
   PCISSetUp - builds the index sets, submatrices, scatters, scaling vector
   and local solvers shared by the PCIS-based substructuring preconditioners.
*/
PetscErrorCode PCISSetUp(PC pc)
{
  PC_IS     *pcis = (PC_IS*)(pc->data);
  Mat_IS    *matis;
  PetscBool flg,issbaij;
  Vec       counter;

  PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann-Neumann requires matrix of type MATIS");
  matis = (Mat_IS*)pc->pmat->data;

  pcis->pure_neumann = matis->pure_neumann;

  /* get info on mapping */
  ISLocalToGlobalMappingGetSize(matis->mapping,&pcis->n);
  ISLocalToGlobalMappingGetInfo(matis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
  /* Creating local and global index sets for interior and interface nodes. */
  {
    PetscInt n_I;
    PetscInt *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscInt *array;
    PetscInt i,j;

    /* Identifying interior and interface nodes, in local numbering */
    PetscMalloc1(pcis->n,&array);
    PetscMemzero(array,pcis->n*sizeof(PetscInt));
    for (i=0;i<pcis->n_neigh;i++)
      for (j=0;j<pcis->n_shared[i];j++)
        array[pcis->shared[i][j]] += 1;
    PetscMalloc1(pcis->n,&idx_I_local);
    PetscMalloc1(pcis->n,&idx_B_local);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (!array[i]) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }
    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I;       /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);
    ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);

    /* Creating the index sets. */
    ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);
    ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);
    ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);
    ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);

    /* Freeing memory and restoring arrays */
    PetscFree(idx_B_local);
    PetscFree(idx_I_local);
    PetscFree(array);
  }
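  /*
     Worked example of the classification above (hypothetical values): with
     n = 4 local nodes and shared-node lists {1,3} and {3}, the counting loop
     produces array = {0,1,0,2}. Nodes with a zero count (0 and 2) are
     interior and the rest (1 and 3) are interface, so n_I = 2, n_B = 2,
     idx_I_local = {0,2} and idx_B_local = {1,3}.
  */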
  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

          [ A_II | A_IB ]
      A = [------+------]
          [ A_BI | A_BB ]
  */
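  /*
    The interface Schur complement implicitly defined by these blocks,

        S = A_BB - A_BI * inv(A_II) * A_IB,

    is the operator applied by PCISApplySchur() and inverted (via a local
    Neumann solve) by PCISApplyInvSchur() further down in this file.
  */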
  MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);
  MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
  if (!issbaij) {
    MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);
    MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);
  } else {
    /* MATSEQSBAIJ stores only the upper triangular part, so convert to MATSEQBAIJ
       before extracting the (unsymmetric) off-diagonal blocks */
    Mat newmat;
    MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat);
    MatGetSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);
    MatGetSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);
    MatDestroy(&newmat);
  }
  /*
    Creating work vectors and arrays
  */
  VecDuplicate(matis->x,&pcis->vec1_N);
  VecDuplicate(pcis->vec1_N,&pcis->vec2_N);
  VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);
  VecDuplicate(pcis->vec1_D,&pcis->vec2_D);
  VecDuplicate(pcis->vec1_D,&pcis->vec3_D);
  VecDuplicate(pcis->vec1_D,&pcis->vec4_D);
  VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);
  VecDuplicate(pcis->vec1_B,&pcis->vec2_B);
  VecDuplicate(pcis->vec1_B,&pcis->vec3_B);
  MatGetVecs(pc->pmat,&pcis->vec1_global,0);
  PetscMalloc1(pcis->n,&pcis->work_N);

  /* Creating the scatter contexts */
  VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);
  VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);
  VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);
  /* Creating scaling "matrix" D */
  PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);
  if (!pcis->D) {
    VecDuplicate(pcis->vec1_B,&pcis->D);
    if (!pcis->use_stiffness_scaling) {
      VecSet(pcis->D,pcis->scaling_factor);
    } else {
      MatGetDiagonal(matis->A,pcis->vec1_N);
      VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
    }
  }
  VecCopy(pcis->D,pcis->vec1_B);
  MatGetVecs(pc->pmat,&counter,0); /* temporary auxiliary vector */
  VecSet(counter,0.0);
  VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);
  VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);
  VecDestroy(&counter);
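  /*
     After the pointwise divide, each local D entry is the subdomain's weight
     divided by the sum of the weights of all subdomains sharing that
     interface node, so the D vectors form a partition of unity on the
     interface. For example (hypothetical values): with the default
     scaling_factor = 1.0, a node shared by 3 subdomains ends up with
     D = 1/3 on each of them; with stiffness scaling and local diagonal
     entries 1.0 and 3.0, the two subdomains get D = 1/4 and D = 3/4.
  */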
  /* See historical note 01, at the bottom of this file. */

  /*
    Creating the KSP contexts for the local Dirichlet and Neumann problems.
  */
  if (pcis->computesolvers) {
    PC pc_ctx;
    /* Dirichlet */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II);
    KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");
    KSPGetPC(pcis->ksp_D,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_D,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_D);
    /* set up the local Dirichlet solver now, so that the factorization is computed during PCSetUp */
    KSPSetUp(pcis->ksp_D);
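    /*
       Illustrative note: because of the "is_localD_" prefix set above, the
       local Dirichlet solver can be customized from the command line, e.g.
       (hypothetical run-time options)

           -is_localD_ksp_type preonly -is_localD_pc_type cholesky

       and similarly for the Neumann solver below via the "is_localN_" prefix.
    */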
    /* Neumann */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_N,matis->A,matis->A);
    KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");
    KSPGetPC(pcis->ksp_N,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_N,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_N);
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;

      PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);
      if (!damp_fixed) fixed_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);

      PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
                          &floating_factor,&set_damping_factor_floating);
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL);
      if (pcis->pure_neumann) {  /* floating subdomain */
        if (!(not_damp_floating)) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,floating_factor);
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          KSPSetNullSpace(pcis->ksp_N,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      } else {  /* fixed subdomain */
        if (damp_fixed) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,fixed_factor);
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          KSPSetNullSpace(pcis->ksp_N,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      }
    }
    /* set up the local Neumann solver now, so that any factorization shift or nullspace setting takes effect */
    KSPSetUp(pcis->ksp_N);
  }
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy - frees everything created by PCISCreate and PCISSetUp.
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  ISDestroy(&pcis->is_B_local);
  ISDestroy(&pcis->is_I_local);
  ISDestroy(&pcis->is_B_global);
  ISDestroy(&pcis->is_I_global);
  MatDestroy(&pcis->A_II);
  MatDestroy(&pcis->A_IB);
  MatDestroy(&pcis->A_BI);
  MatDestroy(&pcis->A_BB);
  VecDestroy(&pcis->D);
  KSPDestroy(&pcis->ksp_N);
  KSPDestroy(&pcis->ksp_D);
  VecDestroy(&pcis->vec1_N);
  VecDestroy(&pcis->vec2_N);
  VecDestroy(&pcis->vec1_D);
  VecDestroy(&pcis->vec2_D);
  VecDestroy(&pcis->vec3_D);
  VecDestroy(&pcis->vec4_D);
  VecDestroy(&pcis->vec1_B);
  VecDestroy(&pcis->vec2_B);
  VecDestroy(&pcis->vec3_B);
  VecDestroy(&pcis->vec1_global);
  VecScatterDestroy(&pcis->global_to_D);
  VecScatterDestroy(&pcis->N_to_B);
  VecScatterDestroy(&pcis->global_to_B);
  PetscFree(pcis->work_N);
  if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
    ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  }
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISCreate - initializes the PC_IS fields of the preconditioner context.
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  pcis->is_B_local  = 0;
  pcis->is_I_local  = 0;
  pcis->is_B_global = 0;
  pcis->is_I_global = 0;
  pcis->A_II        = 0;
  pcis->A_IB        = 0;
  pcis->A_BI        = 0;
  pcis->A_BB        = 0;
  pcis->D           = 0;
  pcis->ksp_N       = 0;
  pcis->ksp_D       = 0;
  pcis->vec1_N      = 0;
  pcis->vec2_N      = 0;
  pcis->vec1_D      = 0;
  pcis->vec2_D      = 0;
  pcis->vec3_D      = 0;
  pcis->vec4_D      = 0;
  pcis->vec1_B      = 0;
  pcis->vec2_B      = 0;
  pcis->vec3_B      = 0;
  pcis->vec1_global = 0;
  pcis->work_N      = 0;
  pcis->global_to_D = 0;
  pcis->N_to_B      = 0;
  pcis->global_to_B = 0;
  pcis->computesolvers = PETSC_TRUE;

  pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;

  pcis->scaling_factor = 1.0;
  /* composing functions */
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur - applies the local Schur complement
   S = A_BB - A_BI * inv(A_II) * A_IB to a vector of interface values.

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of the Schur complement applied to v
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)
*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  if (!vec2_B) vec2_B = v;

  MatMult(pcis->A_BB,v,vec1_B);        /* vec1_B = A_BB v           */
  MatMult(pcis->A_IB,v,vec1_D);        /* vec1_D = A_IB v           */
  KSPSolve(pcis->ksp_D,vec1_D,vec2_D); /* vec2_D = inv(A_II) vec1_D */
  MatMult(pcis->A_BI,vec2_D,vec2_B);   /* vec2_B = A_BI vec2_D      */
  VecAXPY(vec1_B,-1.0,vec2_B);         /* vec1_B = S v              */
  return(0);
}
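/*
   Usage sketch (illustrative only): applying S to an interface vector r,
   reusing the PCIS work vectors created in PCISSetUp():

       PCISApplySchur(pc,r,pcis->vec1_B,pcis->vec2_B,pcis->vec1_D,pcis->vec2_D);

   On return pcis->vec1_B holds S*r and the other work vectors hold garbage.
*/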
/* -------------------------------------------------------------------------- */
/*
   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
   including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
   mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] vector to be scattered into the array

   Output parameters:
.  array_N - [when in SCATTER_REVERSE mode] array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] vector to receive the scattered array

   Notes:
   The entries in the array that do not correspond to interface nodes remain unaltered.
*/
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt       i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  VecGetArray(v_B,&array_B);
  ISGetIndices(pcis->is_B_local,&idex);

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else {  /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else {  /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else {  /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  ISRestoreIndices(pcis->is_B_local,&idex);
  VecRestoreArray(v_B,&array_B);
  return(0);
}
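/*
   Usage sketch (illustrative only): gathering the interface entries of the
   full local work array into the interface work vector, both created in
   PCISSetUp():

       PCISScatterArrayNToVecB(pcis->work_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD,pc);

   With SCATTER_REVERSE the same call writes the entries of vec1_B back into
   the interface positions of work_N, leaving interior entries untouched.
*/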
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:

       [ A_II  A_IB ] [ . ]   [ 0 ]
       [            ] [   ] = [   ]
       [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. The inverse Schur complement is applied to b and the
    result is stored in x.
  */
  /* Setting the RHS vec1_N */
  VecSet(vec1_N,0.0);
  VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL,"-pc_is_check_consistency",&flg,NULL);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);

      VecSum(vec1_N,&average);
      average = average / ((PetscReal)pcis->n);
      PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
      if (pcis->pure_neumann) {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      } else {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      }
      PetscViewerFlush(viewer);
      PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
    }
  }
  /* Solving the system for vec2_N */
  KSPSolve(pcis->ksp_N,vec1_N,vec2_N);
  /* Extracting the local interface vector out of the solution */
  VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}
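/*
   Why solving the Neumann problem applies inv(S): block elimination of the
   system solved above,

       [ A_II  A_IB ] [ y ]   [ 0 ]
       [ A_BI  A_BB ] [ x ] = [ b ],

   gives y = -inv(A_II) A_IB x from the first row; substituting into the
   second row yields (A_BB - A_BI inv(A_II) A_IB) x = S x = b, i.e.
   x = inv(S) b.
*/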