Actual source code: ispai.c
petsc-3.7.3 2016-08-01
2: /*
3: 3/99 Modified by Stephen Barnard to support SPAI version 3.0
4: */
6: /*
7: Provides an interface to the SPAI Sparse Approximate Inverse Preconditioner
8: Code written by Stephen Barnard.
10: Note: there are some BAD memory leaks below!
12: This code needs work
14: 1) get rid of all memory leaks
15: 2) fix the PETSc interface so that it determines whether the matrix is symmetric from the matrix itself
16: rather than relying on the sp flag in PC_SPAI
17: 3) set the block size based on the matrix block size
19: */
20: #define PETSC_SKIP_COMPLEX /* since spai uses I which conflicts with some complex implementations */
22: #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
23: #include <../src/ksp/pc/impls/spai/petscspai.h>
25: /*
26: These are the SPAI include files
27: */
28: EXTERN_C_BEGIN
29: #define SPAI_USE_MPI /* required for setting SPAI_Comm correctly in basics.h */
30: #include <spai.h>
31: #include <matrix.h>
32: EXTERN_C_END
34: extern PetscErrorCode ConvertMatToMatrix(MPI_Comm,Mat,Mat,matrix**);
35: extern PetscErrorCode ConvertMatrixToMat(MPI_Comm,matrix*,Mat*);
36: extern PetscErrorCode ConvertVectorToVec(MPI_Comm,vector *v,Vec *Pv);
37: extern PetscErrorCode MM_to_PETSC(char*,char*,char*);
39: typedef struct {
41: matrix *B; /* matrix in SPAI format */
42: matrix *BT; /* transpose of matrix in SPAI format */
43: matrix *M; /* the approximate inverse in SPAI format */
45: Mat PM; /* the approximate inverse PETSc format */
47: double epsilon; /* tolerance */
48: int nbsteps; /* max number of "improvement" steps per line */
49: int max; /* max dimensions of is_I, q, etc. */
50: int maxnew; /* max number of new entries per step */
51: int block_size; /* constant block size */
52: int cache_size; /* one of (1,2,3,4,5,6) indicating size of cache */
53: int verbose; /* SPAI prints timing and statistics */
55: int sp; /* symmetric nonzero pattern */
56: MPI_Comm comm_spai; /* communicator to be used with spai */
57: } PC_SPAI;
59: /**********************************************************************/
63: static PetscErrorCode PCSetUp_SPAI(PC pc)
64: {
65: PC_SPAI *ispai = (PC_SPAI*)pc->data;
67: Mat AT;
70: init_SPAI();
72: if (ispai->sp) {
73: ConvertMatToMatrix(ispai->comm_spai,pc->pmat,pc->pmat,&ispai->B);
74: } else {
75: /* Use the transpose to get the column nonzero structure. */
76: MatTranspose(pc->pmat,MAT_INITIAL_MATRIX,&AT);
77: ConvertMatToMatrix(ispai->comm_spai,pc->pmat,AT,&ispai->B);
78: MatDestroy(&AT);
79: }
81: /* Destroy the transpose */
82: /* Don't know how to do it. PETSc developers? */
84: /* construct SPAI preconditioner */
85: /* FILE *messages */ /* file for warning messages */
86: /* double epsilon */ /* tolerance */
87: /* int nbsteps */ /* max number of "improvement" steps per line */
88: /* int max */ /* max dimensions of is_I, q, etc. */
89: /* int maxnew */ /* max number of new entries per step */
90: /* int block_size */ /* block_size == 1 specifies scalar elements
91: block_size == n specifies nxn constant-block elements
92: block_size == 0 specifies variable-block elements */
93: /* int cache_size */ /* one of (1,2,3,4,5,6) indicating size of cache. cache_size == 0 indicates no caching */
94: /* int verbose */ /* verbose == 0 specifies that SPAI is silent
95: verbose == 1 prints timing and matrix statistics */
97: bspai(ispai->B,&ispai->M,
98: stdout,
99: ispai->epsilon,
100: ispai->nbsteps,
101: ispai->max,
102: ispai->maxnew,
103: ispai->block_size,
104: ispai->cache_size,
105: ispai->verbose);
107: ConvertMatrixToMat(PetscObjectComm((PetscObject)pc),ispai->M,&ispai->PM);
109: /* free the SPAI matrices */
110: sp_free_matrix(ispai->B);
111: sp_free_matrix(ispai->M);
112: return(0);
113: }
115: /**********************************************************************/
119: static PetscErrorCode PCApply_SPAI(PC pc,Vec xx,Vec y)
120: {
121: PC_SPAI *ispai = (PC_SPAI*)pc->data;
125: /* Now using PETSc's multiply */
126: MatMult(ispai->PM,xx,y);
127: return(0);
128: }
130: /**********************************************************************/
134: static PetscErrorCode PCDestroy_SPAI(PC pc)
135: {
137: PC_SPAI *ispai = (PC_SPAI*)pc->data;
140: MatDestroy(&ispai->PM);
141: MPI_Comm_free(&(ispai->comm_spai));
142: PetscFree(pc->data);
143: return(0);
144: }
146: /**********************************************************************/
150: static PetscErrorCode PCView_SPAI(PC pc,PetscViewer viewer)
151: {
152: PC_SPAI *ispai = (PC_SPAI*)pc->data;
154: PetscBool iascii;
157: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
158: if (iascii) {
159: PetscViewerASCIIPrintf(viewer," SPAI preconditioner\n");
160: PetscViewerASCIIPrintf(viewer," epsilon %g\n", (double)ispai->epsilon);
161: PetscViewerASCIIPrintf(viewer," nbsteps %d\n", ispai->nbsteps);
162: PetscViewerASCIIPrintf(viewer," max %d\n", ispai->max);
163: PetscViewerASCIIPrintf(viewer," maxnew %d\n", ispai->maxnew);
164: PetscViewerASCIIPrintf(viewer," block_size %d\n",ispai->block_size);
165: PetscViewerASCIIPrintf(viewer," cache_size %d\n",ispai->cache_size);
166: PetscViewerASCIIPrintf(viewer," verbose %d\n", ispai->verbose);
167: PetscViewerASCIIPrintf(viewer," sp %d\n", ispai->sp);
168: }
169: return(0);
170: }
174: static PetscErrorCode PCSPAISetEpsilon_SPAI(PC pc,double epsilon1)
175: {
176: PC_SPAI *ispai = (PC_SPAI*)pc->data;
179: ispai->epsilon = epsilon1;
180: return(0);
181: }
183: /**********************************************************************/
187: static PetscErrorCode PCSPAISetNBSteps_SPAI(PC pc,int nbsteps1)
188: {
189: PC_SPAI *ispai = (PC_SPAI*)pc->data;
192: ispai->nbsteps = nbsteps1;
193: return(0);
194: }
196: /**********************************************************************/
198: /* added 1/7/99 g.h. */
201: static PetscErrorCode PCSPAISetMax_SPAI(PC pc,int max1)
202: {
203: PC_SPAI *ispai = (PC_SPAI*)pc->data;
206: ispai->max = max1;
207: return(0);
208: }
210: /**********************************************************************/
214: static PetscErrorCode PCSPAISetMaxNew_SPAI(PC pc,int maxnew1)
215: {
216: PC_SPAI *ispai = (PC_SPAI*)pc->data;
219: ispai->maxnew = maxnew1;
220: return(0);
221: }
223: /**********************************************************************/
227: static PetscErrorCode PCSPAISetBlockSize_SPAI(PC pc,int block_size1)
228: {
229: PC_SPAI *ispai = (PC_SPAI*)pc->data;
232: ispai->block_size = block_size1;
233: return(0);
234: }
236: /**********************************************************************/
240: static PetscErrorCode PCSPAISetCacheSize_SPAI(PC pc,int cache_size)
241: {
242: PC_SPAI *ispai = (PC_SPAI*)pc->data;
245: ispai->cache_size = cache_size;
246: return(0);
247: }
249: /**********************************************************************/
253: static PetscErrorCode PCSPAISetVerbose_SPAI(PC pc,int verbose)
254: {
255: PC_SPAI *ispai = (PC_SPAI*)pc->data;
258: ispai->verbose = verbose;
259: return(0);
260: }
262: /**********************************************************************/
266: static PetscErrorCode PCSPAISetSp_SPAI(PC pc,int sp)
267: {
268: PC_SPAI *ispai = (PC_SPAI*)pc->data;
271: ispai->sp = sp;
272: return(0);
273: }
275: /* -------------------------------------------------------------------*/
279: /*@
280: PCSPAISetEpsilon - set the tolerance for the SPAI preconditioner
282: Input Parameters:
283: + pc - the preconditioner
284: - eps - epsilon (default .4)
286: Notes: Epsilon must be between 0 and 1. It controls the
287: quality of the approximation of M to the inverse of
288: A. Lower values of epsilon lead to more work, more
289: fill, and usually better preconditioners. In many
290: cases the best choice of epsilon is the one that
291: divides the total solution time equally between the
292: preconditioner and the solver.
294: Level: intermediate
296: .seealso: PCSPAI, PCSetType()
297: @*/
298: PetscErrorCode PCSPAISetEpsilon(PC pc,double epsilon1)
299: {
303: PetscTryMethod(pc,"PCSPAISetEpsilon_C",(PC,double),(pc,epsilon1));
304: return(0);
305: }
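/*
   Illustrative usage sketch (editorial example, not part of the original file):
   setting the tolerance from application code, assuming a KSP object named ksp
   has already been created and its operators set; the value 0.3 is arbitrary.

       PC pc;
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCSPAI);
       PCSPAISetEpsilon(pc,0.3);    a tighter tolerance than the default 0.4, so
                                    more fill and usually a better preconditioner

   The same effect is available from the options database with
   -pc_spai_epsilon 0.3 when the application calls KSPSetFromOptions(ksp).
*/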
307: /**********************************************************************/
311: /*@
312: PCSPAISetNBSteps - set maximum number of improvement steps per row in
313: the SPAI preconditioner
315: Input Parameters:
316: + pc - the preconditioner
317: - n - number of steps (default 5)
319: Notes: SPAI constructs an approximation to every column of
320: the exact inverse of A in a series of improvement
321: steps. The quality of the approximation is determined
322: by epsilon. If an approximation achieving an accuracy
323: of epsilon is not obtained after the specified number of steps, SPAI simply
324: uses the best approximation constructed so far.
326: Level: intermediate
328: .seealso: PCSPAI, PCSetType(), PCSPAISetMaxNew()
329: @*/
330: PetscErrorCode PCSPAISetNBSteps(PC pc,int nbsteps1)
331: {
335: PetscTryMethod(pc,"PCSPAISetNBSteps_C",(PC,int),(pc,nbsteps1));
336: return(0);
337: }
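/*
   Illustrative sketch (editorial example, not part of the original file): nbsteps
   works together with epsilon and maxnew. Each of the (at most) nbsteps improvement
   steps may add up to maxnew new nonzero candidates to the current column, and the
   steps stop early once the epsilon tolerance is reached. Assuming pc is already of
   type PCSPAI:

       PCSPAISetNBSteps(pc,10);     allow more improvement steps per column
       PCSPAISetMaxNew(pc,5);       keep the default number of new entries per step
*/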
339: /**********************************************************************/
341: /* added 1/7/99 g.h. */
344: /*@
345: PCSPAISetMax - set the size of various working buffers in
346: the SPAI preconditioner
348: Input Parameters:
349: + pc - the preconditioner
350: - n - size (default is 5000)
352: Level: intermediate
354: .seealso: PCSPAI, PCSetType()
355: @*/
356: PetscErrorCode PCSPAISetMax(PC pc,int max1)
357: {
361: PetscTryMethod(pc,"PCSPAISetMax_C",(PC,int),(pc,max1));
362: return(0);
363: }
365: /**********************************************************************/
369: /*@
370: PCSPAISetMaxNew - set maximum number of new nonzero candidates per step
371: in the SPAI preconditioner
373: Input Parameters:
374: + pc - the preconditioner
375: - n - maximum number (default 5)
377: Level: intermediate
379: .seealso: PCSPAI, PCSetType(), PCSPAISetNBSteps()
380: @*/
381: PetscErrorCode PCSPAISetMaxNew(PC pc,int maxnew1)
382: {
386: PetscTryMethod(pc,"PCSPAISetMaxNew_C",(PC,int),(pc,maxnew1));
387: return(0);
388: }
390: /**********************************************************************/
394: /*@
395: PCSPAISetBlockSize - set the block size for the SPAI preconditioner
397: Input Parameters:
398: + pc - the preconditioner
399: - n - block size (default 1)
401: Notes: A block
402: size of 1 treats A as a matrix of scalar elements. A
403: block size of s > 1 treats A as a matrix of sxs
404: blocks. A block size of 0 treats A as a matrix with
405: variable sized blocks, which are determined by
406: searching for dense square diagonal blocks in A.
407: This can be very effective for finite-element
408: matrices.
410: SPAI will convert A to block form, use a block
411: version of the preconditioner algorithm, and then
412: convert the result back to scalar form.
414: In many cases a block-size parameter other than 1
415: can lead to very significant improvement in
416: performance.
419: Level: intermediate
421: .seealso: PCSPAI, PCSetType()
422: @*/
423: PetscErrorCode PCSPAISetBlockSize(PC pc,int block_size1)
424: {
428: PetscTryMethod(pc,"PCSPAISetBlockSize_C",(PC,int),(pc,block_size1));
429: return(0);
430: }
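/*
   Illustrative sketch (editorial example, not part of the original file): choosing a
   block size. For a discretization with, say, 3 unknowns per grid node, constant 3x3
   blocks are a natural choice, while block_size = 0 lets SPAI search for variable-sized
   dense diagonal blocks itself (often effective for finite-element matrices, as noted
   above). Assuming pc is already of type PCSPAI:

       PCSPAISetBlockSize(pc,3);    treat A as a matrix of 3x3 blocks
       PCSPAISetBlockSize(pc,0);    or: let SPAI detect variable-sized blocks
*/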
432: /**********************************************************************/
436: /*@
437: PCSPAISetCacheSize - specify cache size in the SPAI preconditioner
439: Input Parameters:
440: + pc - the preconditioner
441: - n - cache size {0,1,2,3,4,5} (default 5)
443: Notes: SPAI uses a hash table to cache messages and avoid
444: redundant communication. We suggest always using
445: 5. This parameter is irrelevant in the serial
446: version.
448: Level: intermediate
450: .seealso: PCSPAI, PCSetType()
451: @*/
452: PetscErrorCode PCSPAISetCacheSize(PC pc,int cache_size)
453: {
457: PetscTryMethod(pc,"PCSPAISetCacheSize_C",(PC,int),(pc,cache_size));
458: return(0);
459: }
461: /**********************************************************************/
465: /*@
466: PCSPAISetVerbose - set the verbosity level for the SPAI preconditioner
468: Input Parameters:
469: + pc - the preconditioner
470: - n - level (default 0)
472: Notes: a level of 1 prints parameters, timings, and matrix statistics; a level of 0 is silent
474: Level: intermediate
476: .seealso: PCSPAI, PCSetType()
477: @*/
478: PetscErrorCode PCSPAISetVerbose(PC pc,int verbose)
479: {
483: PetscTryMethod(pc,"PCSPAISetVerbose_C",(PC,int),(pc,verbose));
484: return(0);
485: }
487: /**********************************************************************/
491: /*@
492: PCSPAISetSp - specify a symmetric nonzero (sparsity) pattern for the SPAI preconditioner
494: Input Parameters:
495: + pc - the preconditioner
496: - n - 0 or 1
498: Notes: If A has a symmetric nonzero pattern use -pc_spai_sp 1 to
499: improve performance by eliminating some communication
500: in the parallel version. Even if A does not have a
501: symmetric nonzero pattern -pc_spai_sp 1 may well lead to good
502: results, but the code will not follow the published
503: SPAI algorithm exactly.
506: Level: intermediate
508: .seealso: PCSPAI, PCSetType()
509: @*/
510: PetscErrorCode PCSPAISetSp(PC pc,int sp)
511: {
515: PetscTryMethod(pc,"PCSPAISetSp_C",(PC,int),(pc,sp));
516: return(0);
517: }
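/*
   Illustrative sketch (editorial example, not part of the original file): sp refers only
   to the nonzero pattern of A, not to its values; it is on by default in this interface
   (see PCCreate_SPAI below). For a matrix whose pattern is clearly nonsymmetric it can
   be turned off either programmatically or from the options database:

       PCSPAISetSp(pc,0);
   or
       -pc_spai_sp 0
*/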
519: /**********************************************************************/
521: /**********************************************************************/
525: static PetscErrorCode PCSetFromOptions_SPAI(PetscOptionItems *PetscOptionsObject,PC pc)
526: {
527: PC_SPAI *ispai = (PC_SPAI*)pc->data;
529: int nbsteps1,max1,maxnew1,block_size1,cache_size,verbose,sp;
530: double epsilon1;
531: PetscBool flg;
534: PetscOptionsHead(PetscOptionsObject,"SPAI options");
535: PetscOptionsReal("-pc_spai_epsilon","","PCSPAISetEpsilon",ispai->epsilon,&epsilon1,&flg);
536: if (flg) {
537: PCSPAISetEpsilon(pc,epsilon1);
538: }
539: PetscOptionsInt("-pc_spai_nbsteps","","PCSPAISetNBSteps",ispai->nbsteps,&nbsteps1,&flg);
540: if (flg) {
541: PCSPAISetNBSteps(pc,nbsteps1);
542: }
543: /* added 1/7/99 g.h. */
544: PetscOptionsInt("-pc_spai_max","","PCSPAISetMax",ispai->max,&max1,&flg);
545: if (flg) {
546: PCSPAISetMax(pc,max1);
547: }
548: PetscOptionsInt("-pc_spai_maxnew","","PCSPAISetMaxNew",ispai->maxnew,&maxnew1,&flg);
549: if (flg) {
550: PCSPAISetMaxNew(pc,maxnew1);
551: }
552: PetscOptionsInt("-pc_spai_block_size","","PCSPAISetBlockSize",ispai->block_size,&block_size1,&flg);
553: if (flg) {
554: PCSPAISetBlockSize(pc,block_size1);
555: }
556: PetscOptionsInt("-pc_spai_cache_size","","PCSPAISetCacheSize",ispai->cache_size,&cache_size,&flg);
557: if (flg) {
558: PCSPAISetCacheSize(pc,cache_size);
559: }
560: PetscOptionsInt("-pc_spai_verbose","","PCSPAISetVerbose",ispai->verbose,&verbose,&flg);
561: if (flg) {
562: PCSPAISetVerbose(pc,verbose);
563: }
564: PetscOptionsInt("-pc_spai_sp","","PCSPAISetSp",ispai->sp,&sp,&flg);
565: if (flg) {
566: PCSPAISetSp(pc,sp);
567: }
568: PetscOptionsTail();
569: return(0);
570: }
572: /**********************************************************************/
574: /*MC
575: PCSPAI - Use the Sparse Approximate Inverse method of Grote and Huckle
576: as a preconditioner (SIAM J. Sci. Comput.; vol 18, nr 3)
578: Options Database Keys:
579: + -pc_spai_epsilon <eps> - set tolerance
580: . -pc_spai_nbsteps <n> - set nbsteps
581: . -pc_spai_max <m> - set max
582: . -pc_spai_maxnew <m> - set maxnew
583: . -pc_spai_block_size <n> - set block size
584: . -pc_spai_cache_size <n> - set cache size
585: . -pc_spai_sp <m> - set sp
586: - -pc_spai_verbose <n> - set verbosity level
588: Notes: This only works with AIJ matrices.
590: Level: beginner
592: Concepts: approximate inverse
594: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
595: PCSPAISetEpsilon(), PCSPAISetMax(), PCSPAISetMaxNew(), PCSPAISetBlockSize(),
596: PCSPAISetVerbose(), PCSPAISetSp()
597: M*/
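/*
   Illustrative sketch (editorial example, not part of the original file): selecting the
   preconditioner entirely from the command line, assuming a hypothetical application
   executable ./app that calls KSPSetFromOptions():

       ./app -ksp_type gmres -pc_type spai \
             -pc_spai_epsilon 0.3 -pc_spai_nbsteps 10 -pc_spai_block_size 3 -pc_spai_verbose 1

   Each option is forwarded to the corresponding PCSPAISet...() routine by
   PCSetFromOptions_SPAI() above.
*/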
601: PETSC_EXTERN PetscErrorCode PCCreate_SPAI(PC pc)
602: {
603: PC_SPAI *ispai;
607: PetscNewLog(pc,&ispai);
608: pc->data = ispai;
610: pc->ops->destroy = PCDestroy_SPAI;
611: pc->ops->apply = PCApply_SPAI;
612: pc->ops->applyrichardson = 0;
613: pc->ops->setup = PCSetUp_SPAI;
614: pc->ops->view = PCView_SPAI;
615: pc->ops->setfromoptions = PCSetFromOptions_SPAI;
617: ispai->epsilon = .4;
618: ispai->nbsteps = 5;
619: ispai->max = 5000;
620: ispai->maxnew = 5;
621: ispai->block_size = 1;
622: ispai->cache_size = 5;
623: ispai->verbose = 0;
625: ispai->sp = 1;
626: MPI_Comm_dup(PetscObjectComm((PetscObject)pc),&(ispai->comm_spai));
628: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetEpsilon_C",PCSPAISetEpsilon_SPAI);
629: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetNBSteps_C",PCSPAISetNBSteps_SPAI);
630: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetMax_C",PCSPAISetMax_SPAI);
631: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetMaxNew_C",PCSPAISetMaxNew_SPAI);
632: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetBlockSize_C",PCSPAISetBlockSize_SPAI);
633: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetCacheSize_C",PCSPAISetCacheSize_SPAI);
634: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetVerbose_C",PCSPAISetVerbose_SPAI);
635: PetscObjectComposeFunction((PetscObject)pc,"PCSPAISetSp_C",PCSPAISetSp_SPAI);
636: return(0);
637: }
639: /**********************************************************************/
641: /*
642: Converts from a PETSc matrix to an SPAI matrix
643: */
646: PetscErrorCode ConvertMatToMatrix(MPI_Comm comm, Mat A,Mat AT,matrix **B)
647: {
648: matrix *M;
649: int i,j,col;
650: int row_indx;
651: int len,pe,local_indx,start_indx;
652: int *mapping;
653: PetscErrorCode ierr;
654: const int *cols;
655: const double *vals;
656: int n,mnl,nnl,nz,rstart,rend;
657: PetscMPIInt size,rank;
658: struct compressed_lines *rows;
661: MPI_Comm_size(comm,&size);
662: MPI_Comm_rank(comm,&rank);
663: MatGetSize(A,&n,&n);
664: MatGetLocalSize(A,&mnl,&nnl);
666: /*
667: not sure why a barrier is required. commenting out
668: MPI_Barrier(comm);
669: */
671: M = new_matrix((SPAI_Comm)comm);
673: M->n = n;
674: M->bs = 1;
675: M->max_block_size = 1;
677: M->mnls = (int*)malloc(sizeof(int)*size);
678: M->start_indices = (int*)malloc(sizeof(int)*size);
679: M->pe = (int*)malloc(sizeof(int)*n);
680: M->block_sizes = (int*)malloc(sizeof(int)*n);
681: for (i=0; i<n; i++) M->block_sizes[i] = 1;
683: MPI_Allgather(&mnl,1,MPI_INT,M->mnls,1,MPI_INT,comm);
685: M->start_indices[0] = 0;
686: for (i=1; i<size; i++) M->start_indices[i] = M->start_indices[i-1] + M->mnls[i-1];
688: M->mnl = M->mnls[M->myid];
689: M->my_start_index = M->start_indices[M->myid];
691: for (i=0; i<size; i++) {
692: start_indx = M->start_indices[i];
693: for (j=0; j<M->mnls[i]; j++) M->pe[start_indx+j] = i;
694: }
696: if (AT) {
697: M->lines = new_compressed_lines(M->mnls[rank],1);
698: } else {
699: M->lines = new_compressed_lines(M->mnls[rank],0);
700: }
702: rows = M->lines;
704: /* Determine the mapping from global indices to pointers */
705: PetscMalloc1(M->n,&mapping);
706: pe = 0;
707: local_indx = 0;
708: for (i=0; i<M->n; i++) {
709: if (local_indx >= M->mnls[pe]) {
710: pe++;
711: local_indx = 0;
712: }
713: mapping[i] = local_indx + M->start_indices[pe];
714: local_indx++;
715: }
717: /*********************************************************/
718: /************** Set up the row structure *****************/
719: /*********************************************************/
721: MatGetOwnershipRange(A,&rstart,&rend);
722: for (i=rstart; i<rend; i++) {
723: row_indx = i - rstart;
724: MatGetRow(A,i,&nz,&cols,&vals);
725: /* allocate buffers */
726: rows->ptrs[row_indx] = (int*)malloc(nz*sizeof(int));
727: rows->A[row_indx] = (double*)malloc(nz*sizeof(double));
728: /* copy the matrix */
729: for (j=0; j<nz; j++) {
730: col = cols[j];
731: len = rows->len[row_indx]++;
733: rows->ptrs[row_indx][len] = mapping[col];
734: rows->A[row_indx][len] = vals[j];
735: }
736: rows->slen[row_indx] = rows->len[row_indx];
738: MatRestoreRow(A,i,&nz,&cols,&vals);
739: }
742: /************************************************************/
743: /************** Set up the column structure *****************/
744: /*********************************************************/
746: if (AT) {
748: for (i=rstart; i<rend; i++) {
749: row_indx = i - rstart;
750: MatGetRow(AT,i,&nz,&cols,&vals);
751: /* allocate buffers */
752: rows->rptrs[row_indx] = (int*)malloc(nz*sizeof(int));
753: /* copy the matrix (i.e., the structure) */
754: for (j=0; j<nz; j++) {
755: col = cols[j];
756: len = rows->rlen[row_indx]++;
758: rows->rptrs[row_indx][len] = mapping[col];
759: }
760: MatRestoreRow(AT,i,&nz,&cols,&vals);
761: }
762: }
764: PetscFree(mapping);
766: order_pointers(M);
767: M->maxnz = calc_maxnz(M);
768: *B = M;
769: return(0);
770: }
772: /**********************************************************************/
774: /*
775: Converts from an SPAI matrix B to a PETSc matrix PB.
776: This assumes that the SPAI matrix B is stored in
777: COMPRESSED-ROW format.
778: */
781: PetscErrorCode ConvertMatrixToMat(MPI_Comm comm,matrix *B,Mat *PB)
782: {
783: PetscMPIInt size,rank;
785: int m,n,M,N;
786: int d_nz,o_nz;
787: int *d_nnz,*o_nnz;
788: int i,k,global_row,global_col,first_diag_col,last_diag_col;
789: PetscScalar val;
792: MPI_Comm_size(comm,&size);
793: MPI_Comm_rank(comm,&rank);
795: m = n = B->mnls[rank];
796: d_nz = o_nz = 0;
798: /* Determine preallocation for MatCreateMPIAIJ */
799: PetscMalloc1(m,&d_nnz);
800: PetscMalloc1(m,&o_nnz);
801: for (i=0; i<m; i++) d_nnz[i] = o_nnz[i] = 0;
802: first_diag_col = B->start_indices[rank];
803: last_diag_col = first_diag_col + B->mnls[rank];
804: for (i=0; i<B->mnls[rank]; i++) {
805: for (k=0; k<B->lines->len[i]; k++) {
806: global_col = B->lines->ptrs[i][k];
807: if ((global_col >= first_diag_col) && (global_col < last_diag_col)) d_nnz[i]++;
808: else o_nnz[i]++;
809: }
810: }
812: M = N = B->n;
813: /* Here we only know how to create AIJ format */
814: MatCreate(comm,PB);
815: MatSetSizes(*PB,m,n,M,N);
816: MatSetType(*PB,MATAIJ);
817: MatSeqAIJSetPreallocation(*PB,d_nz,d_nnz);
818: MatMPIAIJSetPreallocation(*PB,d_nz,d_nnz,o_nz,o_nnz);
820: for (i=0; i<B->mnls[rank]; i++) {
821: global_row = B->start_indices[rank]+i;
822: for (k=0; k<B->lines->len[i]; k++) {
823: global_col = B->lines->ptrs[i][k];
825: val = B->lines->A[i][k];
826: MatSetValues(*PB,1,&global_row,1,&global_col,&val,ADD_VALUES);
827: }
828: }
830: PetscFree(d_nnz);
831: PetscFree(o_nnz);
833: MatAssemblyBegin(*PB,MAT_FINAL_ASSEMBLY);
834: MatAssemblyEnd(*PB,MAT_FINAL_ASSEMBLY);
835: return(0);
836: }
838: /**********************************************************************/
840: /*
841: Converts from an SPAI vector v to a PETSc vec Pv.
842: */
845: PetscErrorCode ConvertVectorToVec(MPI_Comm comm,vector *v,Vec *Pv)
846: {
848: PetscMPIInt size,rank;
849: int m,M,i,*mnls,*start_indices,*global_indices;
852: MPI_Comm_size(comm,&size);
853: MPI_Comm_rank(comm,&rank);
855: m = v->mnl;
856: M = v->n;
859: VecCreateMPI(comm,m,M,Pv);
861: PetscMalloc1(size,&mnls);
862: MPI_Allgather(&v->mnl,1,MPI_INT,mnls,1,MPI_INT,comm);
864: PetscMalloc1(size,&start_indices);
866: start_indices[0] = 0;
867: for (i=1; i<size; i++) start_indices[i] = start_indices[i-1] +mnls[i-1];
869: PetscMalloc1(v->mnl,&global_indices);
870: for (i=0; i<v->mnl; i++) global_indices[i] = start_indices[rank] + i;
872: PetscFree(mnls);
873: PetscFree(start_indices);
875: VecSetValues(*Pv,v->mnl,global_indices,v->v,INSERT_VALUES);
876: VecAssemblyBegin(*Pv);
877: VecAssemblyEnd(*Pv);
879: PetscFree(global_indices);
880: return(0);
881: }