matis.c (petsc-3.7.7, 2017-09-25)
/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix vector
    product is handled "implicitly".

    We provide:
       MatMult()

    Currently this allows for only one subdomain per processor.
*/

#include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/

static PetscErrorCode MatISComputeSF_Private(Mat);

static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;

  MatGetSize(matis->A,&matis->sf_nleaves,NULL);
  MatGetLocalSize(B,&matis->sf_nroots,NULL);
  PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);
  ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);
  ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);
  PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);
  return(0);
}

/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e., 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e., 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored.

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on the local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));
  return(0);
}

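/*
   Editor's note, not part of the original source: a minimal usage sketch for
   MatISSetPreallocation. The communicator 'comm', the sizes 'm'/'M', the
   mapping 'lgmap' and the nonzero estimates below are hypothetical.
*/
#if 0
static PetscErrorCode ExampleISSetPreallocation(MPI_Comm comm,PetscInt m,PetscInt M,ISLocalToGlobalMapping lgmap)
{
  Mat A;

  MatCreateIS(comm,1,m,m,M,M,lgmap,lgmap,&A);
  /* rough estimate: at most 5 nonzeros per row in the diagonal block, 2 in the off-diagonal one */
  MatISSetPreallocation(A,5,NULL,2,NULL);
  /* ... set values with MatSetValuesLocal(), assemble, use, then destroy ... */
  MatDestroy(&A);
  return(0);
}
#endif
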
PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS   *matis = (Mat_IS*)(B->data);
  PetscInt bs,i,nlocalcols;

  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    MatISComputeSF_Private(B);
  }
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
  MatGetSize(matis->A,NULL,&nlocalcols);
  MatGetBlockSize(matis->A,&bs);
  PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
  return(0);
}

PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS*)(A->data);
  PetscInt       *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt *global_indices_r,*global_indices_c;
  PetscInt       i,j,bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscMPIInt    nsubdomains;
  PetscBool      isdense,issbaij;
  PetscErrorCode ierr;

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);
  MatGetSize(A,&rows,&cols);
  MatGetBlockSize(A,&bs);
  MatGetSize(matis->A,&local_rows,&local_cols);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
  ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);
  if (A->rmap->mapping != A->cmap->mapping) {
    ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    MatGetRowUpperTriangular(matis->A);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that preallocation is generally not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    MatISComputeSF_Private(A);
  }
  MatGetLocalSize(A,&lrows,&lcols);
  MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);
  /* All processes need to compute the entire row ownership */
  PetscMalloc1(rows,&row_ownership);
  MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);

  /*
     my_dnz and my_onz contain the exact contribution to the preallocation from each local mat;
     they will then be summed up properly. This way, preallocation is always sufficient
  */
  PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt owner = row_ownership[global_indices_r[i]];
      for (j=0;j<local_cols;j++) {
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      MatGetRow(matis->A,i,&ncols,&cols,NULL);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      MatRestoreRow(matis->A,i,&ncols,&cols,NULL);
    }
  }
  if (global_indices_c != global_indices_r) {
    ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);
  }
  ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);
  PetscFree(row_ownership);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
  } else {
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
  }
  PetscFree2(my_dnz,my_onz);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  MatMPIAIJSetPreallocation(B,0,dnz,0,onz);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);
  MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);
  if (issbaij) {
    MatRestoreRowUpperTriangular(matis->A);
  }
  return(0);
}

PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS      *matis = (Mat_IS*)(mat->data);
  Mat         local_mat;
  /* info on mat */
  PetscInt    bs,rows,cols,lrows,lcols;
  PetscInt    local_rows,local_cols;
  PetscBool   isdense,issbaij,isseqaij;
  PetscMPIInt nsubdomains;
  /* values insertion */
  PetscScalar *array;

  /* get info from mat */
  MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      MatDuplicate(matis->A,MAT_COPY_VALUES,M);
    } else {
      MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);
    }
    return(0);
  }
  MatGetSize(mat,&rows,&cols);
  MatGetBlockSize(mat,&bs);
  MatGetLocalSize(mat,&lrows,&lcols);
  MatGetSize(matis->A,&local_rows,&local_cols);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType   new_mat_type;
    PetscBool issbaij_red;

    /* determine the new matrix type */
    MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    MatCreate(PetscObjectComm((PetscObject)mat),M);
    MatSetSizes(*M,lrows,lcols,rows,cols);
    MatSetBlockSize(*M,bs);
    MatSetType(*M,new_mat_type);
    MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    MatGetBlockSize(*M,&mbs);
    MatGetSize(*M,&mrows,&mcols);
    MatGetLocalSize(*M,&mlrows,&mlcols);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    MatZeroEntries(*M);
  }

  if (issbaij) {
    MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);
  } else {
    PetscObjectReference((PetscObject)matis->A);
    local_mat = matis->A;
  }

  /* Set values */
  MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy;

    PetscMalloc1(PetscMax(local_rows,local_cols),&dummy);
    for (i=0;i<PetscMax(local_rows,local_cols);i++) dummy[i] = i;
    MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);
    MatDenseGetArray(local_mat,&array);
    MatSetValuesLocal(*M,local_rows,dummy,local_cols,dummy,array,ADD_VALUES);
    MatDenseRestoreArray(local_mat,&array);
    PetscFree(dummy);
  } else if (isseqaij) {
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called in %s\n",__FUNCT__);
    MatSeqAIJGetArray(local_mat,&array);
    for (i=0;i<nvtxs;i++) {
      MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);
    }
    MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    MatSeqAIJRestoreArray(local_mat,&array);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);
      MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);
      MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);
    }
  }
  MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);
  MatDestroy(&local_mat);
  MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);
  if (isdense) {
    MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);
  }
  return(0);
}

/*@
    MatISGetMPIXAIJ - Converts a MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  if (reuse != MAT_INITIAL_MATRIX) {
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));
  return(0);
}

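/*
   Editor's note, not part of the original source: a sketch of the two MatReuse
   modes of MatISGetMPIXAIJ; 'A' is assumed to be an already assembled MATIS matrix.
*/
#if 0
  Mat Aaij;

  /* first call: allocates and assembles the parallel AIJ matrix */
  MatISGetMPIXAIJ(A,MAT_INITIAL_MATRIX,&Aaij);
  /* after updating the values of A, reuse the same parallel matrix */
  MatISGetMPIXAIJ(A,MAT_REUSE_MATRIX,&Aaij);
  MatDestroy(&Aaij);
#endif
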
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  Mat_IS   *matis = (Mat_IS*)(mat->data);
  PetscInt bs,m,n,M,N;
  Mat      B,localmat;

  MatGetBlockSize(mat,&bs);
  MatGetSize(mat,&M,&N);
  MatGetLocalSize(mat,&m,&n);
  MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);
  MatDuplicate(matis->A,op,&localmat);
  MatISSetLocalMat(B,localmat);
  MatDestroy(&localmat);
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  *newmat = B;
  return(0);
}

PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  Mat_IS    *matis = (Mat_IS*)A->data;
  PetscBool local_sym;

  MatIsHermitian(matis->A,tol,&local_sym);
  MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
  return(0);
}

PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  Mat_IS    *matis = (Mat_IS*)A->data;
  PetscBool local_sym;

  MatIsSymmetric(matis->A,tol,&local_sym);
  MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
  return(0);
}

PetscErrorCode MatDestroy_IS(Mat A)
{
  Mat_IS *b = (Mat_IS*)A->data;

  MatDestroy(&b->A);
  VecScatterDestroy(&b->cctx);
  VecScatterDestroy(&b->rctx);
  VecDestroy(&b->x);
  VecDestroy(&b->y);
  PetscSFDestroy(&b->sf);
  PetscFree2(b->sf_rootdata,b->sf_leafdata);
  PetscFree(A->data);
  PetscObjectChangeTypeName((PetscObject)A,0);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);
  return(0);
}

PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  Mat_IS      *is  = (Mat_IS*)A->data;
  PetscScalar zero = 0.0;

  /* scatter the global vector x into the local work vector */
  VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

  /* multiply the local matrix */
  MatMult(is->A,is->x,is->y);

  /* scatter the product back into the global vector */
  VecSet(y,zero);
  VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
  return(0);
}

PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec temp_vec;

  if (v3 != v2) {
    MatMult(A,v1,v3);
    VecAXPY(v3,1.0,v2);
  } else {
    VecDuplicate(v2,&temp_vec);
    MatMult(A,v1,temp_vec);
    VecAXPY(temp_vec,1.0,v2);
    VecCopy(temp_vec,v3);
    VecDestroy(&temp_vec);
  }
  return(0);
}

PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS *is = (Mat_IS*)A->data;

  /* scatter the global vector y into the local work vector */
  VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);

  /* multiply by the transpose of the local matrix */
  MatMultTranspose(is->A,is->y,is->x);

  /* scatter the product back into the global vector */
  VecSet(x,0);
  VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);
  return(0);
}

PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec temp_vec;

  if (v3 != v2) {
    MatMultTranspose(A,v1,v3);
    VecAXPY(v3,1.0,v2);
  } else {
    VecDuplicate(v2,&temp_vec);
    MatMultTranspose(A,v1,temp_vec);
    VecAXPY(temp_vec,1.0,v2);
    VecCopy(temp_vec,v3);
    VecDestroy(&temp_vec);
  }
  return(0);
}

PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS      *a = (Mat_IS*)A->data;
  PetscViewer sviewer;

  PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
  MatView(a->A,sviewer);
  PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
  return(0);
}

PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscInt nr,rbs,nc,cbs;
  Mat_IS   *is = (Mat_IS*)A->data;
  IS       from,to;
  Vec      cglobal,rglobal;

  /* Destroy any previous data */
  VecDestroy(&is->x);
  VecDestroy(&is->y);
  VecScatterDestroy(&is->rctx);
  VecScatterDestroy(&is->cctx);
  MatDestroy(&is->A);
  PetscSFDestroy(&is->sf);
  PetscFree2(is->sf_rootdata,is->sf_leafdata);

  /* Set up the layouts and the local-to-global maps */
  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);
  PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);
  PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);

  /* Create the local matrix A */
  ISLocalToGlobalMappingGetSize(rmapping,&nr);
  ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);
  ISLocalToGlobalMappingGetSize(cmapping,&nc);
  ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);
  MatCreate(PETSC_COMM_SELF,&is->A);
  MatSetType(is->A,MATAIJ);
  MatSetSizes(is->A,nr,nc,nr,nc);
  MatSetBlockSizes(is->A,rbs,cbs);
  MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);
  MatAppendOptionsPrefix(is->A,"is_");
  MatSetFromOptions(is->A);
  PetscLayoutSetUp(is->A->rmap);
  PetscLayoutSetUp(is->A->cmap);

  /* Create the local work vectors */
  MatCreateVecs(is->A,&is->x,&is->y);

  /* setup the global-to-local scatters */
  MatCreateVecs(A,&cglobal,&rglobal);
  ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);
  ISLocalToGlobalMappingApplyIS(rmapping,to,&from);
  VecScatterCreate(rglobal,from,is->y,to,&is->rctx);
  if (rmapping != cmapping) {
    ISDestroy(&to);
    ISDestroy(&from);
    ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);
    ISLocalToGlobalMappingApplyIS(cmapping,to,&from);
    VecScatterCreate(cglobal,from,is->x,to,&is->cctx);
  } else {
    PetscObjectReference((PetscObject)is->rctx);
    is->cctx = is->rctx;
  }
  VecDestroy(&rglobal);
  VecDestroy(&cglobal);
  ISDestroy(&to);
  ISDestroy(&from);
  return(0);
}

PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS   *is = (Mat_IS*)mat->data;
  PetscInt rows_l[2048],cols_l[2048];

#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);
  ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);
  MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
  return(0);
}

PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  Mat_IS *is = (Mat_IS*)A->data;

  MatSetValues(is->A,m,rows,n,cols,values,addv);
  return(0);
}

PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  Mat_IS *is = (Mat_IS*)A->data;

  MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);
  return(0);
}

PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscInt n_l = 0, *rows_l = NULL;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    PetscMalloc1(n,&rows_l);
    ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);
  }
  MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);
  PetscFree(rows_l);
  return(0);
}

PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS      *is = (Mat_IS*)A->data;
  PetscInt    i;
  PetscScalar *array;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector", so that MatMult_IS
       works properly on the interface nodes.
    */
    Vec counter;
    MatCreateVecs(A,NULL,&counter);
    VecSet(counter,0.);
    VecSet(is->y,1.);
    VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);
    VecDestroy(&counter);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    VecGetArray(is->y,&array);
    MatZeroRows(is->A,n,rows,diag,0,0);
    for (i=0; i<n; i++) {
      MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);
    }
    MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);
    VecRestoreArray(is->y,&array);
  }
  return(0);
}

PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS *is = (Mat_IS*)A->data;

  MatAssemblyBegin(is->A,type);
  return(0);
}

PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS *is = (Mat_IS*)A->data;

  MatAssemblyEnd(is->A,type);
  return(0);
}

PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  *local = is->A;
  return(0);
}

/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    This function does not increase the reference count for the local Mat. Do not destroy it and do not attempt to use
  your reference after destroying the parent mat.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));
  return(0);
}

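/*
   Editor's note, not part of the original source: a sketch of setting an exact
   preallocation directly on the inner matrix, as suggested in the Notes above;
   'A' is assumed to be a MATIS matrix and 'local_nnz' a hypothetical per-row
   nonzero count for the local problem.
*/
#if 0
  Mat lA;

  MatISGetLocalMat(A,&lA);
  MatSeqAIJSetPreallocation(lA,0,local_nnz);
  /* do not destroy lA: the reference is still owned by A */
#endif
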
PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS   *is = (Mat_IS*)mat->data;
  PetscInt nrows,ncols,orows,ocols;

  if (is->A) {
    MatGetSize(is->A,&orows,&ocols);
    MatGetSize(local,&nrows,&ncols);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)",orows,ocols,nrows,ncols);
  }
  PetscObjectReference((PetscObject)local);
  MatDestroy(&is->A);
  is->A = local;
  return(0);
}

/*@
    MatISSetLocalMat - Replaces the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the MATIS matrix object.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));
  return(0);
}

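/*
   Editor's note, not part of the original source: a sketch of replacing the
   inner matrix; the new matrix must match the sizes of the previous one.
   'A', 'nr', 'nc' and 'nnz' are hypothetical.
*/
#if 0
  Mat lA;

  MatCreateSeqAIJ(PETSC_COMM_SELF,nr,nc,0,nnz,&lA);
  /* ... fill and assemble lA ... */
  MatISSetLocalMat(A,lA);
  MatDestroy(&lA); /* MatISSetLocalMat takes its own reference */
#endif
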
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS *a = (Mat_IS*)A->data;

  MatZeroEntries(a->A);
  return(0);
}

PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS *is = (Mat_IS*)A->data;

  MatScale(is->A,a);
  return(0);
}

PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS *is = (Mat_IS*)A->data;

  /* get the diagonal of the local matrix */
  MatGetDiagonal(is->A,is->y);

  /* scatter the diagonal back into the global vector */
  VecSet(v,0);
  VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);
  return(0);
}

PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS *a = (Mat_IS*)A->data;

  MatSetOption(a->A,op,flg);
  return(0);
}

/*@
    MatCreateIS - Creates a "process" unassembled matrix: it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm - MPI communicator that will share the matrix
.     bs - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vectors used in matrix-vector products
.     rmap - local-to-global map for rows
-     cmap - local-to-global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the sizes of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap is NULL, then the matrix is assumed to be square.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the two mappings");
  MatCreate(comm,A);
  MatSetSizes(*A,m,n,M,N);
  MatSetBlockSize(*A,bs);
  MatSetType(*A,MATIS);
  MatSetUp(*A);
  if (rmap && cmap) {
    MatSetLocalToGlobalMapping(*A,rmap,cmap);
  } else if (!rmap) {
    MatSetLocalToGlobalMapping(*A,cmap,cmap);
  } else {
    MatSetLocalToGlobalMapping(*A,rmap,rmap);
  }
  return(0);
}

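/*
   Editor's note, not part of the original source: a minimal sketch that builds
   a local-to-global map and creates a square MATIS matrix from it; 'comm',
   'nlocal', 'global_idx' and 'M' are hypothetical. Passing NULL for cmap uses
   rmap for the columns as well, as described in the Notes above.
*/
#if 0
  ISLocalToGlobalMapping lgmap;
  Mat                    A;

  ISLocalToGlobalMappingCreate(comm,1,nlocal,global_idx,PETSC_COPY_VALUES,&lgmap);
  MatCreateIS(comm,1,PETSC_DECIDE,PETSC_DECIDE,M,M,lgmap,NULL,&A);
  ISLocalToGlobalMappingDestroy(&lgmap);
#endif
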
/*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition preconditioners (e.g. PCBDDC).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatZeroRowsLocal()
.  MatSetValues()
.  MatSetValuesLocal()
.  MatScale()
.  MatGetDiagonal()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.  -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: The options prefix for the inner matrix is given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you can use MatISSetPreallocation().

   Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC

M*/

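/*
   Editor's note, not part of the original source: a sketch of selecting the
   type through the options database instead of calling MatCreateIS() directly;
   'comm', the sizes and 'lgmap' are hypothetical. Run with -mat_type is.
*/
#if 0
  Mat A;

  MatCreate(comm,&A);
  MatSetSizes(A,m,n,M,N);
  MatSetFromOptions(A); /* picks up -mat_type is from the options database */
  MatSetLocalToGlobalMapping(A,lgmap,lgmap);
#endif
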
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  Mat_IS *b;

  PetscNewLog(A,&b);
  A->data = (void*)b;

  /* matrix ops */
  PetscMemzero(A->ops,sizeof(struct _MatOps));
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions */
  PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);
  PetscObjectChangeTypeName((PetscObject)A,MATIS);
  return(0);
}