Actual source code: bjacobi.c
petsc-3.9.4 2018-09-11
2: /*
3: Defines a block Jacobi preconditioner.
4: */
6: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>
8: static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9: static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10: static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
12: static PetscErrorCode PCSetUp_BJacobi(PC pc)
13: {
14: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
15: Mat mat = pc->mat,pmat = pc->pmat;
17: PetscBool hasop;
18: PetscInt N,M,start,i,sum,end;
19: PetscInt bs,i_start=-1,i_end=-1;
20: PetscMPIInt rank,size;
21: const char *pprefix,*mprefix;
24: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
25: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
26: MatGetLocalSize(pc->pmat,&M,&N);
27: MatGetBlockSize(pc->pmat,&bs);
29: if (jac->n > 0 && jac->n < size) {
30: PCSetUp_BJacobi_Multiproc(pc);
31: return(0);
32: }
34: /* --------------------------------------------------------------------------
35: Determines the number of blocks assigned to each processor
36: -----------------------------------------------------------------------------*/
38: /* local block count given */
39: if (jac->n_local > 0 && jac->n < 0) {
40: MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
41: if (jac->l_lens) { /* check that user set these correctly */
42: sum = 0;
43: for (i=0; i<jac->n_local; i++) {
44: if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
45: sum += jac->l_lens[i];
46: }
47: if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
48: } else {
49: PetscMalloc1(jac->n_local,&jac->l_lens);
50: for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
51: }
52: } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
53: /* global blocks given: determine which ones are local */
54: if (jac->g_lens) {
55:       /* check that g_lens has valid entries */
56: for (i=0; i<jac->n; i++) {
57: if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
58: if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
59: }
60: if (size == 1) {
61: jac->n_local = jac->n;
62: PetscMalloc1(jac->n_local,&jac->l_lens);
63: PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));
64: /* check that user set these correctly */
65: sum = 0;
66: for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
67: if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
68: } else {
69: MatGetOwnershipRange(pc->pmat,&start,&end);
70:       /* loop over blocks determining the first one owned by me */
71: sum = 0;
72: for (i=0; i<jac->n+1; i++) {
73: if (sum == start) { i_start = i; goto start_1;}
74: if (i < jac->n) sum += jac->g_lens[i];
75: }
76: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
77: start_1:
78: for (i=i_start; i<jac->n+1; i++) {
79: if (sum == end) { i_end = i; goto end_1; }
80: if (i < jac->n) sum += jac->g_lens[i];
81: }
82: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
83: end_1:
84: jac->n_local = i_end - i_start;
85: PetscMalloc1(jac->n_local,&jac->l_lens);
86: PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));
87: }
88:   } else { /* no global blocks given, determine them using default layout */
89: jac->n_local = jac->n/size + ((jac->n % size) > rank);
90: PetscMalloc1(jac->n_local,&jac->l_lens);
91: for (i=0; i<jac->n_local; i++) {
92: jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
93: if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
94: }
95: }
96: } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
97: jac->n = size;
98: jac->n_local = 1;
99: PetscMalloc1(1,&jac->l_lens);
100: jac->l_lens[0] = M;
101: } else { /* jac->n > 0 && jac->n_local > 0 */
102: if (!jac->l_lens) {
103: PetscMalloc1(jac->n_local,&jac->l_lens);
104: for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
105: }
106: }
107: if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");
109: /* -------------------------
110: Determines mat and pmat
111: ---------------------------*/
112: MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);
113: if (!hasop && size == 1) {
114: mat = pc->mat;
115: pmat = pc->pmat;
116: } else {
117: if (pc->useAmat) {
118: /* use block from Amat matrix, not Pmat for local MatMult() */
119: MatGetDiagonalBlock(pc->mat,&mat);
120: /* make submatrix have same prefix as entire matrix */
121: PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);
122: PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);
123: }
124: if (pc->pmat != pc->mat || !pc->useAmat) {
125: MatGetDiagonalBlock(pc->pmat,&pmat);
126: /* make submatrix have same prefix as entire matrix */
127: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
128: PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);
129: } else pmat = mat;
130: }
132: /* ------
133: Setup code depends on the number of blocks
134: */
135: if (jac->n_local == 1) {
136: PCSetUp_BJacobi_Singleblock(pc,mat,pmat);
137: } else {
138: PCSetUp_BJacobi_Multiblock(pc,mat,pmat);
139: }
140: return(0);
141: }
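
For reference, the default layout computed above distributes the (M/bs) blocks of size bs as evenly as possible over the n_local requested blocks, with the remainder going to the leading blocks. A standalone sketch with purely illustrative values (not taken from this file):

     PetscInt M = 10, bs = 1, n_local = 3, i;
     PetscInt l_lens[3];
     for (i=0; i<n_local; i++) l_lens[i] = bs*((M/bs)/n_local + (((M/bs) % n_local) > i));
     /* yields l_lens = {4,3,3}: the single leftover row goes to the first block */
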
143: /* Default destroy, used if the PC has never been set up */
144: static PetscErrorCode PCDestroy_BJacobi(PC pc)
145: {
146: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
150: PetscFree(jac->g_lens);
151: PetscFree(jac->l_lens);
152: PetscFree(pc->data);
153: return(0);
154: }
157: static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
158: {
159: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
161: PetscInt blocks,i;
162: PetscBool flg;
165: PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");
166: PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);
167: if (flg) {
168: PCBJacobiSetTotalBlocks(pc,blocks,NULL);
169: }
170: if (jac->ksp) {
171: /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
172: * unless we had already been called. */
173: for (i=0; i<jac->n_local; i++) {
174: KSPSetFromOptions(jac->ksp[i]);
175: }
176: }
177: PetscOptionsTail();
178: return(0);
179: }
181: #include <petscdraw.h>
182: static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
183: {
184: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
185: PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
186: PetscErrorCode ierr;
187: PetscMPIInt rank;
188: PetscInt i;
189: PetscBool iascii,isstring,isdraw;
190: PetscViewer sviewer;
193: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
194: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
195: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
196: if (iascii) {
197: if (pc->useAmat) {
198: PetscViewerASCIIPrintf(viewer," using Amat local matrix, number of blocks = %D\n",jac->n);
199: }
200: PetscViewerASCIIPrintf(viewer," number of blocks = %D\n",jac->n);
201: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
202: if (jac->same_local_solves) {
203: PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");
204: if (jac->ksp && !jac->psubcomm) {
205: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
206: if (!rank) {
207: PetscViewerASCIIPushTab(viewer);
208: KSPView(jac->ksp[0],sviewer);
209: PetscViewerASCIIPopTab(viewer);
210: }
211: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
212: } else if (mpjac && jac->ksp && mpjac->psubcomm) {
213: PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);
214: if (!mpjac->psubcomm->color) {
215: PetscViewerASCIIPushTab(viewer);
216: KSPView(*(jac->ksp),sviewer);
217: PetscViewerASCIIPopTab(viewer);
218: }
219: PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);
220: }
221: } else {
222: PetscInt n_global;
223: MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));
224: PetscViewerASCIIPushSynchronized(viewer);
225: PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");
226: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
227: rank,jac->n_local,jac->first_local);
228: PetscViewerASCIIPushTab(viewer);
229: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
230: for (i=0; i<jac->n_local; i++) {
231: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);
232: KSPView(jac->ksp[i],sviewer);
233: PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
234: }
235: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
236: PetscViewerASCIIPopTab(viewer);
237: PetscViewerFlush(viewer);
238: PetscViewerASCIIPopSynchronized(viewer);
239: }
240: } else if (isstring) {
241: PetscViewerStringSPrintf(viewer," blks=%D",jac->n);
242: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
243: if (jac->ksp) {KSPView(jac->ksp[0],sviewer);}
244: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
245: } else if (isdraw) {
246: PetscDraw draw;
247: char str[25];
248: PetscReal x,y,bottom,h;
250: PetscViewerDrawGetDraw(viewer,0,&draw);
251: PetscDrawGetCurrentPoint(draw,&x,&y);
252: PetscSNPrintf(str,25,"Number blocks %D",jac->n);
253: PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
254: bottom = y - h;
255: PetscDrawPushCurrentPoint(draw,x,bottom);
256:     /* warning: the communicator on the viewer is different than on the ksp in parallel */
257: if (jac->ksp) {KSPView(jac->ksp[0],viewer);}
258: PetscDrawPopCurrentPoint(draw);
259: }
260: return(0);
261: }
263: /* -------------------------------------------------------------------------------------*/
265: static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
266: {
267:   PC_BJacobi *jac = (PC_BJacobi*)pc->data;
270: if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
272: if (n_local) *n_local = jac->n_local;
273: if (first_local) *first_local = jac->first_local;
274: *ksp = jac->ksp;
275: jac->same_local_solves = PETSC_FALSE; /* Assume that local solves are now different;
276: not necessarily true though! This flag is
277: used only for PCView_BJacobi() */
278: return(0);
279: }
281: static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
282: {
283: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
287: if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
288: jac->n = blocks;
289: if (!lens) jac->g_lens = 0;
290: else {
291: PetscMalloc1(blocks,&jac->g_lens);
292: PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));
293: PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));
294: }
295: return(0);
296: }
298: static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
299: {
300: PC_BJacobi *jac = (PC_BJacobi*) pc->data;
303: *blocks = jac->n;
304: if (lens) *lens = jac->g_lens;
305: return(0);
306: }
308: static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
309: {
310: PC_BJacobi *jac;
314: jac = (PC_BJacobi*)pc->data;
316: jac->n_local = blocks;
317: if (!lens) jac->l_lens = 0;
318: else {
319: PetscMalloc1(blocks,&jac->l_lens);
320: PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));
321: PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));
322: }
323: return(0);
324: }
326: static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
327: {
328: PC_BJacobi *jac = (PC_BJacobi*) pc->data;
331: *blocks = jac->n_local;
332: if (lens) *lens = jac->l_lens;
333: return(0);
334: }
336: /* -------------------------------------------------------------------------------------*/
338: /*@C
339: PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
340: this processor.
342: Not Collective
344: Input Parameter:
345: . pc - the preconditioner context
347: Output Parameters:
348: + n_local - the number of blocks on this processor, or NULL
349: . first_local - the global number of the first block on this processor, or NULL
350: - ksp - the array of KSP contexts
352: Notes:
353:    The array of KSP contexts returned by PCBJacobiGetSubKSP() is owned by the PC and must not be freed by the caller.
355: Currently for some matrix implementations only 1 block per processor
356: is supported.
358: You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
360: Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
361: You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
362: KSP array must be.
364: Level: advanced
366: .keywords: block, Jacobi, get, sub, KSP, context
368: .seealso: PCBJACOBI, PCASMGetSubKSP()
369: @*/
370: PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
371: {
376: PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
377: return(0);
378: }
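
A minimal usage sketch for the routine above, assuming an outer KSP named ksp whose operators are already set; the ILU(1) block solver is an illustrative choice, not mandated by PCBJACOBI:

     KSP      *subksp;
     PC        pc,subpc;
     PetscInt  i,nlocal,first;

     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBJACOBI);
     KSPSetUp(ksp);                                  /* must precede PCBJacobiGetSubKSP() */
     PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);  /* subksp is owned by the PC; do not free it */
     for (i=0; i<nlocal; i++) {
       KSPGetPC(subksp[i],&subpc);
       PCSetType(subpc,PCILU);                       /* illustrative block solver */
       PCFactorSetLevels(subpc,1);
     }
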
380: /*@
381: PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
382: Jacobi preconditioner.
384: Collective on PC
386: Input Parameters:
387: + pc - the preconditioner context
388: . blocks - the number of blocks
389: - lens - [optional] integer array containing the size of each block
391: Options Database Key:
392: . -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
394: Notes:
395: Currently only a limited number of blocking configurations are supported.
396: All processors sharing the PC must call this routine with the same data.
398: Level: intermediate
400: .keywords: set, number, Jacobi, global, total, blocks
402: .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
403: @*/
404: PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
405: {
410: if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
411: PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));
412: return(0);
413: }
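
A hedged sketch of the call documented above; the block count and lengths are illustrative (they must sum to the global matrix size, assumed here to be 100):

     PetscInt lens[3] = {30,40,30};        /* per-block sizes, summing to the global size */
     PCBJacobiSetTotalBlocks(pc,3,lens);   /* every process sharing pc must pass the same data */
     /* or, for equal-sized blocks:  PCBJacobiSetTotalBlocks(pc,4,NULL); */
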
415: /*@C
416: PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
417: Jacobi preconditioner.
419: Not Collective
421: Input Parameter:
422: . pc - the preconditioner context
424: Output parameters:
425: + blocks - the number of blocks
426: - lens - integer array containing the size of each block
428: Level: intermediate
430: .keywords: get, number, Jacobi, global, total, blocks
432: .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
433: @*/
434: PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
435: {
441: PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));
442: return(0);
443: }
445: /*@
446: PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
447: Jacobi preconditioner.
449: Not Collective
451: Input Parameters:
452: + pc - the preconditioner context
453: . blocks - the number of blocks
454: - lens - [optional] integer array containing size of each block
456: Note:
457: Currently only a limited number of blocking configurations are supported.
459: Level: intermediate
461: .keywords: PC, set, number, Jacobi, local, blocks
463: .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
464: @*/
465: PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
466: {
471:   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
472: PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));
473: return(0);
474: }
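
Correspondingly, a sketch of specifying only the blocks owned by the calling process; the sizes are illustrative and must add up to this process's number of local rows:

     PetscInt llens[2] = {60,40};          /* sizes of this process's two blocks */
     PCBJacobiSetLocalBlocks(pc,2,llens);
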
476: /*@C
477: PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
478: Jacobi preconditioner.
480: Not Collective
482:    Input Parameter:
483: .  pc - the preconditioner context

       Output Parameters:
484: +  blocks - the number of blocks
485: -  lens - [optional] integer array containing size of each block
487: Note:
488: Currently only a limited number of blocking configurations are supported.
490: Level: intermediate
492: .keywords: PC, get, number, Jacobi, local, blocks
494: .seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
495: @*/
496: PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
497: {
503: PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));
504: return(0);
505: }
507: /* -----------------------------------------------------------------------------------*/
509: /*MC
510: PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
511: its own KSP object.
513: Options Database Keys:
514: + -pc_use_amat - use Amat to apply block of operator in inner Krylov method
515: - -pc_bjacobi_blocks <n> - use n total blocks
517: Notes: Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.
519:     To set options on the solvers for each block, append -sub_ to all the KSP and PC
520:          options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
522:     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
523:       and set the options directly on the resulting KSP object (you can access its PC
524:       with KSPGetPC())
526: For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
527:      performance. A different block partitioning may require additional data transfers
528:      between host and GPU and degrade performance.
530: The options prefix for each block is sub_, for example -sub_pc_type lu.
532:      When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.
534: Level: beginner
536: Concepts: block Jacobi
538: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
539: PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
540: PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
541: M*/
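
A hedged end-to-end sketch of the usage described in the notes above; the matrix A, vectors b and x, and the runtime options are assumptions rather than part of this file:

     KSP ksp;
     PC  pc;

     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBJACOBI);
     PCBJacobiSetTotalBlocks(pc,4,NULL);   /* four equal-sized blocks (illustrative) */
     KSPSetFromOptions(ksp);               /* picks up -sub_ksp_type, -sub_pc_type, ... */
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);

     run with, for example: -sub_ksp_type preonly -sub_pc_type ilu
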
543: PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
544: {
546: PetscMPIInt rank;
547: PC_BJacobi *jac;
550: PetscNewLog(pc,&jac);
551: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
553: pc->ops->apply = 0;
554: pc->ops->applytranspose = 0;
555: pc->ops->setup = PCSetUp_BJacobi;
556: pc->ops->destroy = PCDestroy_BJacobi;
557: pc->ops->setfromoptions = PCSetFromOptions_BJacobi;
558: pc->ops->view = PCView_BJacobi;
559: pc->ops->applyrichardson = 0;
561: pc->data = (void*)jac;
562: jac->n = -1;
563: jac->n_local = -1;
564: jac->first_local = rank;
565: jac->ksp = 0;
566: jac->same_local_solves = PETSC_TRUE;
567: jac->g_lens = 0;
568: jac->l_lens = 0;
569: jac->psubcomm = 0;
571: PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);
572: PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);
573: PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);
574: PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);
575: PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);
576: return(0);
577: }
579: /* --------------------------------------------------------------------------------------------*/
580: /*
581:    These are for a single block per processor; they work for AIJ and BAIJ, both Seq and MPI
582: */
583: static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
584: {
585: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
586: PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
587: PetscErrorCode ierr;
590: KSPReset(jac->ksp[0]);
591: VecDestroy(&bjac->x);
592: VecDestroy(&bjac->y);
593: return(0);
594: }
596: static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
597: {
598: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
599: PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
600: PetscErrorCode ierr;
603: PCReset_BJacobi_Singleblock(pc);
604: KSPDestroy(&jac->ksp[0]);
605: PetscFree(jac->ksp);
606: PetscFree(jac->l_lens);
607: PetscFree(jac->g_lens);
608: PetscFree(bjac);
609: PetscFree(pc->data);
610: return(0);
611: }
613: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
614: {
615: PetscErrorCode ierr;
616: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
617: KSP subksp = jac->ksp[0];
618: KSPConvergedReason reason;
621: KSPSetUp(subksp);
622: KSPGetConvergedReason(subksp,&reason);
623: if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
624: pc->failedreason = PC_SUBPC_ERROR;
625: }
626: return(0);
627: }
629: static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
630: {
631: PetscErrorCode ierr;
632: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
633: PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
636: VecGetLocalVectorRead(x, bjac->x);
637: VecGetLocalVector(y, bjac->y);
638: /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
639:    matrix may change even if the outer KSP/PC has not updated the preconditioner; this will trigger a rebuild
640:    of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag.*/
641: KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);
642: KSPSolve(jac->ksp[0],bjac->x,bjac->y);
643: VecRestoreLocalVectorRead(x, bjac->x);
644: VecRestoreLocalVector(y, bjac->y);
645: return(0);
646: }
648: static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
649: {
650: PetscErrorCode ierr;
651: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
652: PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
653: PetscScalar *y_array;
654: const PetscScalar *x_array;
655: PC subpc;
658: /*
659:      The VecPlaceArray() calls are to avoid having to copy the
660:      x and y vectors into the work vectors bjac->x and bjac->y.
661:      The reason for the work vectors is that we need sequential
662:      vectors for the sequential solve.
663: */
664: VecGetArrayRead(x,&x_array);
665: VecGetArray(y,&y_array);
666: VecPlaceArray(bjac->x,x_array);
667: VecPlaceArray(bjac->y,y_array);
668: /* apply the symmetric left portion of the inner PC operator */
669: /* note this by-passes the inner KSP and its options completely */
670: KSPGetPC(jac->ksp[0],&subpc);
671: PCApplySymmetricLeft(subpc,bjac->x,bjac->y);
672: VecResetArray(bjac->x);
673: VecResetArray(bjac->y);
674: VecRestoreArrayRead(x,&x_array);
675: VecRestoreArray(y,&y_array);
676: return(0);
677: }
679: static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
680: {
681: PetscErrorCode ierr;
682: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
683: PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
684: PetscScalar *y_array;
685: const PetscScalar *x_array;
686: PC subpc;
689: /*
690:      The VecPlaceArray() calls are to avoid having to copy the
691:      x and y vectors into the work vectors bjac->x and bjac->y.
692:      The reason for the work vectors is that we need sequential
693:      vectors for the sequential solve.
694: */
695: VecGetArrayRead(x,&x_array);
696: VecGetArray(y,&y_array);
697: VecPlaceArray(bjac->x,x_array);
698: VecPlaceArray(bjac->y,y_array);
700: /* apply the symmetric right portion of the inner PC operator */
701: /* note this by-passes the inner KSP and its options completely */
703: KSPGetPC(jac->ksp[0],&subpc);
704: PCApplySymmetricRight(subpc,bjac->x,bjac->y);
       VecResetArray(bjac->x);       /* undo the VecPlaceArray() calls above, as in the symmetric-left routine */
       VecResetArray(bjac->y);
706:   VecRestoreArrayRead(x,&x_array);
707:   VecRestoreArray(y,&y_array);
708: return(0);
709: }
711: static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
712: {
713: PetscErrorCode ierr;
714: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
715: PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
716: PetscScalar *y_array;
717: const PetscScalar *x_array;
720: /*
721:      The VecPlaceArray() calls are to avoid having to copy the
722:      x and y vectors into the work vectors bjac->x and bjac->y.
723:      The reason for the work vectors is that we need sequential
724:      vectors for the sequential solve.
725: */
726: VecGetArrayRead(x,&x_array);
727: VecGetArray(y,&y_array);
728: VecPlaceArray(bjac->x,x_array);
729: VecPlaceArray(bjac->y,y_array);
730: KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);
731: VecResetArray(bjac->x);
732: VecResetArray(bjac->y);
733: VecRestoreArrayRead(x,&x_array);
734: VecRestoreArray(y,&y_array);
735: return(0);
736: }
738: static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
739: {
740: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
741: PetscErrorCode ierr;
742: PetscInt m;
743: KSP ksp;
744: PC_BJacobi_Singleblock *bjac;
745: PetscBool wasSetup = PETSC_TRUE;
746: #if defined(PETSC_HAVE_VECCUDA) || defined(PETSC_HAVE_VIENNACL)
747: PetscBool is_gpumatrix = PETSC_FALSE;
748: #endif
751: if (!pc->setupcalled) {
752: const char *prefix;
754: if (!jac->ksp) {
755: wasSetup = PETSC_FALSE;
757: KSPCreate(PETSC_COMM_SELF,&ksp);
758: KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
759: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
760: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
761: KSPSetType(ksp,KSPPREONLY);
762: PCGetOptionsPrefix(pc,&prefix);
763: KSPSetOptionsPrefix(ksp,prefix);
764: KSPAppendOptionsPrefix(ksp,"sub_");
766: pc->ops->reset = PCReset_BJacobi_Singleblock;
767: pc->ops->destroy = PCDestroy_BJacobi_Singleblock;
768: pc->ops->apply = PCApply_BJacobi_Singleblock;
769: pc->ops->applysymmetricleft = PCApplySymmetricLeft_BJacobi_Singleblock;
770: pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
771: pc->ops->applytranspose = PCApplyTranspose_BJacobi_Singleblock;
772: pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Singleblock;
774: PetscMalloc1(1,&jac->ksp);
775: jac->ksp[0] = ksp;
777: PetscNewLog(pc,&bjac);
778: jac->data = (void*)bjac;
779: } else {
780: ksp = jac->ksp[0];
781: bjac = (PC_BJacobi_Singleblock*)jac->data;
782: }
784: /*
785: The reason we need to generate these vectors is to serve
786: as the right-hand side and solution vector for the solve on the
787: block. We do not need to allocate space for the vectors since
788: that is provided via VecPlaceArray() just before the call to
789: KSPSolve() on the block.
790: */
791: MatGetSize(pmat,&m,&m);
792: VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);
793: VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);
794: #if defined(PETSC_HAVE_VECCUDA)
795: PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJCUSPARSE,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");
796: if (is_gpumatrix) {
797: VecSetType(bjac->x,VECCUDA);
798: VecSetType(bjac->y,VECCUDA);
799: }
800: #endif
801: #if defined(PETSC_HAVE_VIENNACL)
802: PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJVIENNACL,MATSEQAIJVIENNACL,MATMPIAIJVIENNACL,"");
803: if (is_gpumatrix) {
804: VecSetType(bjac->x,VECVIENNACL);
805: VecSetType(bjac->y,VECVIENNACL);
806: }
807: #endif
808: PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);
809: PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);
810: } else {
811: ksp = jac->ksp[0];
812: bjac = (PC_BJacobi_Singleblock*)jac->data;
813: }
814: if (pc->useAmat) {
815: KSPSetOperators(ksp,mat,pmat);
816: } else {
817: KSPSetOperators(ksp,pmat,pmat);
818: }
819: if (!wasSetup && pc->setfromoptionscalled) {
820: /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
821: KSPSetFromOptions(ksp);
822: }
823: return(0);
824: }
826: /* ---------------------------------------------------------------------------------------------*/
827: static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
828: {
829: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
830: PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
831: PetscErrorCode ierr;
832: PetscInt i;
835: if (bjac && bjac->pmat) {
836: MatDestroyMatrices(jac->n_local,&bjac->pmat);
837: if (pc->useAmat) {
838: MatDestroyMatrices(jac->n_local,&bjac->mat);
839: }
840: }
842: for (i=0; i<jac->n_local; i++) {
843: KSPReset(jac->ksp[i]);
844: if (bjac && bjac->x) {
845: VecDestroy(&bjac->x[i]);
846: VecDestroy(&bjac->y[i]);
847: ISDestroy(&bjac->is[i]);
848: }
849: }
850: PetscFree(jac->l_lens);
851: PetscFree(jac->g_lens);
852: return(0);
853: }
855: static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
856: {
857: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
858: PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
859: PetscErrorCode ierr;
860: PetscInt i;
863: PCReset_BJacobi_Multiblock(pc);
864: if (bjac) {
865: PetscFree2(bjac->x,bjac->y);
866: PetscFree(bjac->starts);
867: PetscFree(bjac->is);
868: }
869: PetscFree(jac->data);
870: for (i=0; i<jac->n_local; i++) {
871: KSPDestroy(&jac->ksp[i]);
872: }
873: PetscFree(jac->ksp);
874: PetscFree(pc->data);
875: return(0);
876: }
878: static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
879: {
880: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
881: PetscErrorCode ierr;
882: PetscInt i,n_local = jac->n_local;
883: KSPConvergedReason reason;
886: for (i=0; i<n_local; i++) {
887: KSPSetUp(jac->ksp[i]);
888: KSPGetConvergedReason(jac->ksp[i],&reason);
889: if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
890: pc->failedreason = PC_SUBPC_ERROR;
891: }
892: }
893: return(0);
894: }
896: /*
897: Preconditioner for block Jacobi
898: */
899: static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
900: {
901: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
902: PetscErrorCode ierr;
903: PetscInt i,n_local = jac->n_local;
904: PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
905: PetscScalar *yin;
906: const PetscScalar *xin;
909: VecGetArrayRead(x,&xin);
910: VecGetArray(y,&yin);
911: for (i=0; i<n_local; i++) {
912: /*
913: To avoid copying the subvector from x into a workspace we instead
914: make the workspace vector array point to the subpart of the array of
915: the global vector.
916: */
917: VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);
918: VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);
920: PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);
921: KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);
922: PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);
924: VecResetArray(bjac->x[i]);
925: VecResetArray(bjac->y[i]);
926: }
927: VecRestoreArrayRead(x,&xin);
928: VecRestoreArray(y,&yin);
929: return(0);
930: }
932: /*
933: Preconditioner for block Jacobi
934: */
935: static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
936: {
937: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
938: PetscErrorCode ierr;
939: PetscInt i,n_local = jac->n_local;
940: PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
941: PetscScalar *yin;
942: const PetscScalar *xin;
945: VecGetArrayRead(x,&xin);
946: VecGetArray(y,&yin);
947: for (i=0; i<n_local; i++) {
948: /*
949: To avoid copying the subvector from x into a workspace we instead
950: make the workspace vector array point to the subpart of the array of
951: the global vector.
952: */
953: VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);
954: VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);
956: PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);
957: KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);
958: PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);
960: VecResetArray(bjac->x[i]);
961: VecResetArray(bjac->y[i]);
962: }
963: VecRestoreArrayRead(x,&xin);
964: VecRestoreArray(y,&yin);
965: return(0);
966: }
968: static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
969: {
970: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
971: PetscErrorCode ierr;
972: PetscInt m,n_local,N,M,start,i;
973: const char *prefix,*pprefix,*mprefix;
974: KSP ksp;
975: Vec x,y;
976: PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
977: PC subpc;
978: IS is;
979: MatReuse scall;
980: #if defined(PETSC_HAVE_VECCUDA) || defined(PETSC_HAVE_VIENNACL)
981: PetscBool is_gpumatrix = PETSC_FALSE;
982: #endif
985: MatGetLocalSize(pc->pmat,&M,&N);
987: n_local = jac->n_local;
989: if (pc->useAmat) {
990: PetscBool same;
991: PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);
992: if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
993: }
995: if (!pc->setupcalled) {
996: scall = MAT_INITIAL_MATRIX;
998: if (!jac->ksp) {
999: pc->ops->reset = PCReset_BJacobi_Multiblock;
1000: pc->ops->destroy = PCDestroy_BJacobi_Multiblock;
1001: pc->ops->apply = PCApply_BJacobi_Multiblock;
1002: pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1003: pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1005: PetscNewLog(pc,&bjac);
1006: PetscMalloc1(n_local,&jac->ksp);
1007: PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(KSP)));
1008: PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);
1009: PetscMalloc1(n_local,&bjac->starts);
1010: PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(PetscScalar)));
1012: jac->data = (void*)bjac;
1013: PetscMalloc1(n_local,&bjac->is);
1014: PetscLogObjectMemory((PetscObject)pc,sizeof(n_local*sizeof(IS)));
1016: for (i=0; i<n_local; i++) {
1017: KSPCreate(PETSC_COMM_SELF,&ksp);
1018: KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
1019: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
1020: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
1021: KSPSetType(ksp,KSPPREONLY);
1022: KSPGetPC(ksp,&subpc);
1023: PCGetOptionsPrefix(pc,&prefix);
1024: KSPSetOptionsPrefix(ksp,prefix);
1025: KSPAppendOptionsPrefix(ksp,"sub_");
1027: jac->ksp[i] = ksp;
1028: }
1029: } else {
1030: bjac = (PC_BJacobi_Multiblock*)jac->data;
1031: }
1033: start = 0;
1034: for (i=0; i<n_local; i++) {
1035: m = jac->l_lens[i];
1036: /*
1037: The reason we need to generate these vectors is to serve
1038: as the right-hand side and solution vector for the solve on the
1039: block. We do not need to allocate space for the vectors since
1040: that is provided via VecPlaceArray() just before the call to
1041: KSPSolve() on the block.
1043: */
1044: VecCreateSeq(PETSC_COMM_SELF,m,&x);
1045: VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);
1046: #if defined(PETSC_HAVE_VECCUDA)
1047: PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJCUSPARSE,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,"");
1048: if (is_gpumatrix) {
1049: VecSetType(x,VECCUDA);
1050: VecSetType(y,VECCUDA);
1051: }
1052: #endif
1053: #if defined(PETSC_HAVE_VIENNACL)
1054: PetscObjectTypeCompareAny((PetscObject)pmat,&is_gpumatrix,MATAIJVIENNACL,MATSEQAIJVIENNACL,MATMPIAIJVIENNACL,"");
1055: if (is_gpumatrix) {
1056: VecSetType(x,VECVIENNACL);
1057: VecSetType(y,VECVIENNACL);
1058: }
1059: #endif
1060: PetscLogObjectParent((PetscObject)pc,(PetscObject)x);
1061: PetscLogObjectParent((PetscObject)pc,(PetscObject)y);
1063: bjac->x[i] = x;
1064: bjac->y[i] = y;
1065: bjac->starts[i] = start;
1067: ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);
1068: bjac->is[i] = is;
1069: PetscLogObjectParent((PetscObject)pc,(PetscObject)is);
1071: start += m;
1072: }
1073: } else {
1074: bjac = (PC_BJacobi_Multiblock*)jac->data;
1075: /*
1076: Destroy the blocks from the previous iteration
1077: */
1078: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1079: MatDestroyMatrices(n_local,&bjac->pmat);
1080: if (pc->useAmat) {
1081: MatDestroyMatrices(n_local,&bjac->mat);
1082: }
1083: scall = MAT_INITIAL_MATRIX;
1084: } else scall = MAT_REUSE_MATRIX;
1085: }
1087: MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);
1088: if (pc->useAmat) {
1089: PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);
1090: MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);
1091: }
1092: /* Return control to the user so that the submatrices can be modified (e.g., to apply
1093: different boundary conditions for the submatrices than for the global problem) */
1094: PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);
1096: PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);
1097: for (i=0; i<n_local; i++) {
1098: PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);
1099: PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);
1100: if (pc->useAmat) {
1101: PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);
1102: PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);
1103: KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);
1104: } else {
1105: KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);
1106: }
1107: if (pc->setfromoptionscalled) {
1108: KSPSetFromOptions(jac->ksp[i]);
1109: }
1110: }
1111: return(0);
1112: }
1114: /* ---------------------------------------------------------------------------------------------*/
1115: /*
1116: These are for a single block with multiple processes;
1117: */
1118: static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1119: {
1120: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
1121: PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1122: PetscErrorCode ierr;
1125: VecDestroy(&mpjac->ysub);
1126: VecDestroy(&mpjac->xsub);
1127: MatDestroy(&mpjac->submats);
1128: if (jac->ksp) {KSPReset(jac->ksp[0]);}
1129: return(0);
1130: }
1132: static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1133: {
1134: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
1135: PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1136: PetscErrorCode ierr;
1139: PCReset_BJacobi_Multiproc(pc);
1140: KSPDestroy(&jac->ksp[0]);
1141: PetscFree(jac->ksp);
1142: PetscSubcommDestroy(&mpjac->psubcomm);
1144: PetscFree(mpjac);
1145: PetscFree(pc->data);
1146: return(0);
1147: }
1149: static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1150: {
1151: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
1152: PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1153: PetscErrorCode ierr;
1154: PetscScalar *yarray;
1155: const PetscScalar *xarray;
1156: KSPConvergedReason reason;
1159: /* place x's and y's local arrays into xsub and ysub */
1160: VecGetArrayRead(x,&xarray);
1161: VecGetArray(y,&yarray);
1162: VecPlaceArray(mpjac->xsub,xarray);
1163: VecPlaceArray(mpjac->ysub,yarray);
1165: /* apply preconditioner on each matrix block */
1166: PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);
1167: KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);
1168: PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);
1169: KSPGetConvergedReason(jac->ksp[0],&reason);
1170: if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
1171: pc->failedreason = PC_SUBPC_ERROR;
1172: }
1174: VecResetArray(mpjac->xsub);
1175: VecResetArray(mpjac->ysub);
1176: VecRestoreArrayRead(x,&xarray);
1177: VecRestoreArray(y,&yarray);
1178: return(0);
1179: }
1181: static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1182: {
1183: PC_BJacobi *jac = (PC_BJacobi*)pc->data;
1184: PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1185: PetscErrorCode ierr;
1186: PetscInt m,n;
1187: MPI_Comm comm,subcomm=0;
1188: const char *prefix;
1189: PetscBool wasSetup = PETSC_TRUE;
1190: #if defined(PETSC_HAVE_VECCUDA) || defined(PETSC_HAVE_VIENNACL)
1191: PetscBool is_gpumatrix = PETSC_FALSE;
1192: #endif
1196: PetscObjectGetComm((PetscObject)pc,&comm);
1197: if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
1198: jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1199: if (!pc->setupcalled) {
1200: wasSetup = PETSC_FALSE;
1201: PetscNewLog(pc,&mpjac);
1202: jac->data = (void*)mpjac;
1204: /* initialize datastructure mpjac */
1205: if (!jac->psubcomm) {
1206:       /* Create default contiguous subcommunicators if the user does not provide them */
1207: PetscSubcommCreate(comm,&jac->psubcomm);
1208: PetscSubcommSetNumber(jac->psubcomm,jac->n);
1209: PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
1210: PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));
1211: }
1212: mpjac->psubcomm = jac->psubcomm;
1213: subcomm = PetscSubcommChild(mpjac->psubcomm);
1215: /* Get matrix blocks of pmat */
1216: MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);
1218:     /* create a new PC that the processors in each subcomm have a copy of */
1219: PetscMalloc1(1,&jac->ksp);
1220: KSPCreate(subcomm,&jac->ksp[0]);
1221: KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);
1222: PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);
1223: PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);
1224: KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);
1225: KSPGetPC(jac->ksp[0],&mpjac->pc);
1227: PCGetOptionsPrefix(pc,&prefix);
1228: KSPSetOptionsPrefix(jac->ksp[0],prefix);
1229: KSPAppendOptionsPrefix(jac->ksp[0],"sub_");
1230: /*
1231: PetscMPIInt rank,subsize,subrank;
1232: MPI_Comm_rank(comm,&rank);
1233: MPI_Comm_size(subcomm,&subsize);
1234: MPI_Comm_rank(subcomm,&subrank);
1236: MatGetLocalSize(mpjac->submats,&m,NULL);
1237: MatGetSize(mpjac->submats,&n,NULL);
1238: PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
1239: PetscSynchronizedFlush(comm,PETSC_STDOUT);
1240: */
1242: /* create dummy vectors xsub and ysub */
1243: MatGetLocalSize(mpjac->submats,&m,&n);
1244: VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);
1245: VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);
1246: #if defined(PETSC_HAVE_VECCUDA)
1247: PetscObjectTypeCompareAny((PetscObject)mpjac->submats,&is_gpumatrix,MATAIJCUSPARSE,MATMPIAIJCUSPARSE,"");
1248: if (is_gpumatrix) {
1249: VecSetType(mpjac->xsub,VECMPICUDA);
1250: VecSetType(mpjac->ysub,VECMPICUDA);
1251: }
1252: #endif
1253: #if defined(PETSC_HAVE_VIENNACL)
1254: PetscObjectTypeCompareAny((PetscObject)mpjac->submats,&is_gpumatrix,MATAIJVIENNACL,MATMPIAIJVIENNACL,"");
1255: if (is_gpumatrix) {
1256: VecSetType(mpjac->xsub,VECMPIVIENNACL);
1257: VecSetType(mpjac->ysub,VECMPIVIENNACL);
1258: }
1259: #endif
1260: PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);
1261: PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);
1263: pc->ops->reset = PCReset_BJacobi_Multiproc;
1264: pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
1265: pc->ops->apply = PCApply_BJacobi_Multiproc;
1266: } else { /* pc->setupcalled */
1267: subcomm = PetscSubcommChild(mpjac->psubcomm);
1268: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1269: /* destroy old matrix blocks, then get new matrix blocks */
1270: if (mpjac->submats) {MatDestroy(&mpjac->submats);}
1271: MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);
1272: } else {
1273: MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);
1274: }
1275: KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);
1276: }
1278: if (!wasSetup && pc->setfromoptionscalled) {
1279: KSPSetFromOptions(jac->ksp[0]);
1280: }
1281: return(0);
1282: }