Actual source code: matis.c
petsc-3.9.4 2018-09-11
1: /*
2: Creates a matrix class for use with Neumann-Neumann type preconditioners.
3: This stores the matrices in globally unassembled form. Each processor
4: assembles only its local Neumann problem and the parallel matrix-vector
5: product is handled "implicitly".
7: Currently this allows for only one subdomain per processor.
8: */
10: #include <../src/mat/impls/is/matis.h>
11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
12: #include <petsc/private/sfimpl.h>
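/*
   The following sketch is not part of matis.c; it illustrates, with placeholder names
   (ExampleAssembleMATIS, comm, nowned, nlocal, l2g), how user code typically builds a matrix
   in the unassembled format described above: a local-to-global map lists the subdomain's
   degrees of freedom (interface dofs are repeated on every process that shares them) and only
   local values are ever inserted.
*/
static PetscErrorCode ExampleAssembleMATIS(MPI_Comm comm,PetscInt nowned,PetscInt nlocal,const PetscInt l2g[],Mat *A)
{
  ISLocalToGlobalMapping map;

  /* nowned: rows of the global layout owned by this process; nlocal: size of the local subdomain */
  ISLocalToGlobalMappingCreate(comm,1,nlocal,l2g,PETSC_COPY_VALUES,&map);
  MatCreate(comm,A);
  MatSetSizes(*A,nowned,nowned,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(*A,MATIS);
  MatSetLocalToGlobalMapping(*A,map,map);
  ISLocalToGlobalMappingDestroy(&map);
  /* rough preallocation (9 nonzeros per row is an assumed, problem-dependent number) */
  MatISSetPreallocation(*A,9,NULL,9,NULL);
  /* ... insert the local Neumann matrix with MatSetValuesLocal(*A,...,ADD_VALUES) ... */
  MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
  return(0);
}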
14: #define MATIS_MAX_ENTRIES_INSERTION 2048
15: static PetscErrorCode MatSetValuesLocal_IS(Mat,PetscInt,const PetscInt*,PetscInt,const PetscInt*,const PetscScalar*,InsertMode);
16: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat,PetscInt,const PetscInt*,PetscInt,const PetscInt*,const PetscScalar*,InsertMode);
18: static PetscErrorCode MatISContainerDestroyFields_Private(void *ptr)
19: {
20: MatISLocalFields lf = (MatISLocalFields)ptr;
21: PetscInt i;
22: PetscErrorCode ierr;
25: for (i=0;i<lf->nr;i++) {
26: ISDestroy(&lf->rf[i]);
27: }
28: for (i=0;i<lf->nc;i++) {
29: ISDestroy(&lf->cf[i]);
30: }
31: PetscFree2(lf->rf,lf->cf);
32: PetscFree(lf);
33: return(0);
34: }
36: static PetscErrorCode MatISContainerDestroyArray_Private(void *ptr)
37: {
41: PetscFree(ptr);
42: return(0);
43: }
45: PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_IS(Mat A,MatType type,MatReuse reuse,Mat *newmat)
46: {
47: Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data;
48: Mat_SeqAIJ *diag = (Mat_SeqAIJ*)(aij->A->data);
49: Mat_SeqAIJ *offd = (Mat_SeqAIJ*)(aij->B->data);
50: Mat lA;
51: ISLocalToGlobalMapping rl2g,cl2g;
52: IS is;
53: MPI_Comm comm;
54: void *ptrs[2];
55: const char *names[2] = {"_convert_csr_aux","_convert_csr_data"};
56: PetscScalar *dd,*od,*aa,*data;
57: PetscInt *di,*dj,*oi,*oj;
58: PetscInt *aux,*ii,*jj;
59: PetscInt lc,dr,dc,oc,str,stc,nnz,i,jd,jo,cum;
60: PetscErrorCode ierr;
63: PetscObjectGetComm((PetscObject)A,&comm);
64: if (!aij->garray) SETERRQ(comm,PETSC_ERR_SUP,"garray not present");
66: /* access relevant information from MPIAIJ */
67: MatGetOwnershipRange(A,&str,NULL);
68: MatGetOwnershipRangeColumn(A,&stc,NULL);
69: MatGetLocalSize(A,&dr,&dc);
70: di = diag->i;
71: dj = diag->j;
72: dd = diag->a;
73: oc = aij->B->cmap->n;
74: oi = offd->i;
75: oj = offd->j;
76: od = offd->a;
77: nnz = diag->i[dr] + offd->i[dr];
79: /* generate l2g maps for rows and cols */
80: ISCreateStride(comm,dr,str,1,&is);
81: ISLocalToGlobalMappingCreateIS(is,&rl2g);
82: ISDestroy(&is);
83: if (dr) {
84: PetscMalloc1(dc+oc,&aux);
85: for (i=0; i<dc; i++) aux[i] = i+stc;
86: for (i=0; i<oc; i++) aux[i+dc] = aij->garray[i];
87: ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
88: lc = dc+oc;
89: } else {
90: ISCreateGeneral(comm,0,NULL,PETSC_OWN_POINTER,&is);
91: lc = 0;
92: }
93: ISLocalToGlobalMappingCreateIS(is,&cl2g);
94: ISDestroy(&is);
96: /* create MATIS object */
97: MatCreate(comm,newmat);
98: MatSetSizes(*newmat,dr,dc,PETSC_DECIDE,PETSC_DECIDE);
99: MatSetType(*newmat,MATIS);
100: MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);
101: ISLocalToGlobalMappingDestroy(&rl2g);
102: ISLocalToGlobalMappingDestroy(&cl2g);
104: /* merge local matrices */
105: PetscMalloc1(nnz+dr+1,&aux);
106: PetscMalloc1(nnz,&data);
107: ii = aux;
108: jj = aux+dr+1;
109: aa = data;
110: *ii = *(di++) + *(oi++);
111: for (jd=0,jo=0,cum=0;*ii<nnz;cum++)
112: {
113: for (;jd<*di;jd++) { *jj++ = *dj++; *aa++ = *dd++; }
114: for (;jo<*oi;jo++) { *jj++ = *oj++ + dc; *aa++ = *od++; }
115: *(++ii) = *(di++) + *(oi++);
116: }
117: for (;cum<dr;cum++) *(++ii) = nnz;
118: ii = aux;
119: jj = aux+dr+1;
120: aa = data;
121: MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,lc,ii,jj,aa,&lA);
123: /* create containers to destroy the data */
124: ptrs[0] = aux;
125: ptrs[1] = data;
126: for (i=0; i<2; i++) {
127: PetscContainer c;
129: PetscContainerCreate(PETSC_COMM_SELF,&c);
130: PetscContainerSetPointer(c,ptrs[i]);
131: PetscContainerSetUserDestroy(c,MatISContainerDestroyArray_Private);
132: PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
133: PetscContainerDestroy(&c);
134: }
136: /* finalize matrix */
137: MatISSetLocalMat(*newmat,lA);
138: MatDestroy(&lA);
139: MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);
140: MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);
141: return(0);
142: }
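/*
   Usage sketch (not part of matis.c): the conversion implemented above is normally reached
   through MatConvert() on an already assembled MPIAIJ matrix; the names ExampleConvertAIJToIS,
   A and B below are placeholders.
*/
static PetscErrorCode ExampleConvertAIJToIS(Mat A,Mat *B)
{
  /* A is an assembled MATMPIAIJ matrix; on each process the owned rows become the
     sequential local matrix of the resulting MATIS */
  MatConvert(A,MATIS,MAT_INITIAL_MATRIX,B);
  return(0);
}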
144: /*@
145: MatISSetUpSF - Sets up the star forest objects used by MATIS.
147: Collective on MPI_Comm
149: Input Parameter:
150: . A - the matrix
152: Level: advanced
154: Notes: This function does not need to be called by the user.
156: .keywords: matrix
158: .seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat()
159: @*/
160: PetscErrorCode MatISSetUpSF(Mat A)
161: {
167: PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));
168: return(0);
169: }
171: PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat A,MatType type,MatReuse reuse,Mat *newmat)
172: {
173: Mat **nest,*snest,**rnest,lA,B;
174: IS *iscol,*isrow,*islrow,*islcol;
175: ISLocalToGlobalMapping rl2g,cl2g;
176: MPI_Comm comm;
177: PetscInt *lr,*lc,*l2gidxs;
178: PetscInt i,j,nr,nc,rbs,cbs;
179: PetscBool convert,lreuse,*istrans;
180: PetscErrorCode ierr;
183: MatNestGetSubMats(A,&nr,&nc,&nest);
184: lreuse = PETSC_FALSE;
185: rnest = NULL;
186: if (reuse == MAT_REUSE_MATRIX) {
187: PetscBool ismatis,isnest;
189: PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);
190: if (!ismatis) SETERRQ1(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_USER,"Cannot reuse matrix of type %s",((PetscObject)(*newmat))->type_name);
191: MatISGetLocalMat(*newmat,&lA);
192: PetscObjectTypeCompare((PetscObject)lA,MATNEST,&isnest);
193: if (isnest) {
194: MatNestGetSubMats(lA,&i,&j,&rnest);
195: lreuse = (PetscBool)(i == nr && j == nc);
196: if (!lreuse) rnest = NULL;
197: }
198: }
199: PetscObjectGetComm((PetscObject)A,&comm);
200: PetscCalloc2(nr,&lr,nc,&lc);
201: PetscCalloc6(nr,&isrow,nc,&iscol,
202: nr,&islrow,nc,&islcol,
203: nr*nc,&snest,nr*nc,&istrans);
204: MatNestGetISs(A,isrow,iscol);
205: for (i=0;i<nr;i++) {
206: for (j=0;j<nc;j++) {
207: PetscBool ismatis;
208: PetscInt l1,l2,lb1,lb2,ij=i*nc+j;
210: /* Null matrix pointers are allowed in MATNEST */
211: if (!nest[i][j]) continue;
213: /* Nested matrices should be of type MATIS */
214: PetscObjectTypeCompare((PetscObject)nest[i][j],MATTRANSPOSEMAT,&istrans[ij]);
215: if (istrans[ij]) {
216: Mat T,lT;
217: MatTransposeGetMat(nest[i][j],&T);
218: PetscObjectTypeCompare((PetscObject)T,MATIS,&ismatis);
219: if (!ismatis) SETERRQ2(comm,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) (transposed) is not of type MATIS",i,j);
220: MatISGetLocalMat(T,&lT);
221: MatCreateTranspose(lT,&snest[ij]);
222: } else {
223: PetscObjectTypeCompare((PetscObject)nest[i][j],MATIS,&ismatis);
224: if (!ismatis) SETERRQ2(comm,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) is not of type MATIS",i,j);
225: MatISGetLocalMat(nest[i][j],&snest[ij]);
226: }
228: /* Check compatibility of local sizes */
229: MatGetSize(snest[ij],&l1,&l2);
230: MatGetBlockSizes(snest[ij],&lb1,&lb2);
231: if (!l1 || !l2) continue;
232: if (lr[i] && l1 != lr[i]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid local size %D != %D",i,j,lr[i],l1);
233: if (lc[j] && l2 != lc[j]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid local size %D != %D",i,j,lc[j],l2);
234: lr[i] = l1;
235: lc[j] = l2;
237: /* check compatibility for local matrix reuse */
238: if (rnest && !rnest[i][j] != !snest[ij]) lreuse = PETSC_FALSE;
239: }
240: }
242: #if defined (PETSC_USE_DEBUG)
243: /* Check compatibility of l2g maps for rows */
244: for (i=0;i<nr;i++) {
245: rl2g = NULL;
246: for (j=0;j<nc;j++) {
247: PetscInt n1,n2;
249: if (!nest[i][j]) continue;
250: if (istrans[i*nc+j]) {
251: Mat T;
253: MatTransposeGetMat(nest[i][j],&T);
254: MatGetLocalToGlobalMapping(T,NULL,&cl2g);
255: } else {
256: MatGetLocalToGlobalMapping(nest[i][j],&cl2g,NULL);
257: }
258: ISLocalToGlobalMappingGetSize(cl2g,&n1);
259: if (!n1) continue;
260: if (!rl2g) {
261: rl2g = cl2g;
262: } else {
263: const PetscInt *idxs1,*idxs2;
264: PetscBool same;
266: ISLocalToGlobalMappingGetSize(rl2g,&n2);
267: if (n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid row l2gmap size %D != %D",i,j,n1,n2);
268: ISLocalToGlobalMappingGetIndices(cl2g,&idxs1);
269: ISLocalToGlobalMappingGetIndices(rl2g,&idxs2);
270: PetscMemcmp(idxs1,idxs2,n1*sizeof(PetscInt),&same);
271: ISLocalToGlobalMappingRestoreIndices(cl2g,&idxs1);
272: ISLocalToGlobalMappingRestoreIndices(rl2g,&idxs2);
273: if (!same) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid row l2gmap",i,j);
274: }
275: }
276: }
277: /* Check compatibility of l2g maps for columns */
278: for (i=0;i<nc;i++) {
279: rl2g = NULL;
280: for (j=0;j<nr;j++) {
281: PetscInt n1,n2;
283: if (!nest[j][i]) continue;
284: if (istrans[j*nc+i]) {
285: Mat T;
287: MatTransposeGetMat(nest[j][i],&T);
288: MatGetLocalToGlobalMapping(T,&cl2g,NULL);
289: } else {
290: MatGetLocalToGlobalMapping(nest[j][i],NULL,&cl2g);
291: }
292: ISLocalToGlobalMappingGetSize(cl2g,&n1);
293: if (!n1) continue;
294: if (!rl2g) {
295: rl2g = cl2g;
296: } else {
297: const PetscInt *idxs1,*idxs2;
298: PetscBool same;
300: ISLocalToGlobalMappingGetSize(rl2g,&n2);
301: if (n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid column l2gmap size %D != %D",j,i,n1,n2);
302: ISLocalToGlobalMappingGetIndices(cl2g,&idxs1);
303: ISLocalToGlobalMappingGetIndices(rl2g,&idxs2);
304: PetscMemcmp(idxs1,idxs2,n1*sizeof(PetscInt),&same);
305: ISLocalToGlobalMappingRestoreIndices(cl2g,&idxs1);
306: ISLocalToGlobalMappingRestoreIndices(rl2g,&idxs2);
307: if (!same) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid column l2gmap",j,i);
308: }
309: }
310: }
311: #endif
313: B = NULL;
314: if (reuse != MAT_REUSE_MATRIX) {
315: PetscInt stl;
317: /* Create l2g map for the rows of the new matrix and index sets for the local MATNEST */
318: for (i=0,stl=0;i<nr;i++) stl += lr[i];
319: PetscMalloc1(stl,&l2gidxs);
320: for (i=0,stl=0;i<nr;i++) {
321: Mat usedmat;
322: Mat_IS *matis;
323: const PetscInt *idxs;
325: /* local IS for local NEST */
326: ISCreateStride(PETSC_COMM_SELF,lr[i],stl,1,&islrow[i]);
328: /* l2gmap */
329: j = 0;
330: usedmat = nest[i][j];
331: while (!usedmat && j < nc-1) usedmat = nest[i][++j];
332: if (!usedmat) SETERRQ(comm,PETSC_ERR_SUP,"Cannot find valid row mat");
334: if (istrans[i*nc+j]) {
335: Mat T;
336: MatTransposeGetMat(usedmat,&T);
337: usedmat = T;
338: }
339: MatISSetUpSF(usedmat);
340: matis = (Mat_IS*)(usedmat->data);
341: ISGetIndices(isrow[i],&idxs);
342: if (istrans[i*nc+j]) {
343: PetscSFBcastBegin(matis->csf,MPIU_INT,idxs,l2gidxs+stl);
344: PetscSFBcastEnd(matis->csf,MPIU_INT,idxs,l2gidxs+stl);
345: } else {
346: PetscSFBcastBegin(matis->sf,MPIU_INT,idxs,l2gidxs+stl);
347: PetscSFBcastEnd(matis->sf,MPIU_INT,idxs,l2gidxs+stl);
348: }
349: ISRestoreIndices(isrow[i],&idxs);
350: stl += lr[i];
351: }
352: ISLocalToGlobalMappingCreate(comm,1,stl,l2gidxs,PETSC_OWN_POINTER,&rl2g);
354: /* Create l2g map for columns of the new matrix and index sets for the local MATNEST */
355: for (i=0,stl=0;i<nc;i++) stl += lc[i];
356: PetscMalloc1(stl,&l2gidxs);
357: for (i=0,stl=0;i<nc;i++) {
358: Mat usedmat;
359: Mat_IS *matis;
360: const PetscInt *idxs;
362: /* local IS for local NEST */
363: ISCreateStride(PETSC_COMM_SELF,lc[i],stl,1,&islcol[i]);
365: /* l2gmap */
366: j = 0;
367: usedmat = nest[j][i];
368: while (!usedmat && j < nr-1) usedmat = nest[++j][i];
369: if (!usedmat) SETERRQ(comm,PETSC_ERR_SUP,"Cannot find valid column mat");
370: if (istrans[j*nc+i]) {
371: Mat T;
372: MatTransposeGetMat(usedmat,&T);
373: usedmat = T;
374: }
375: MatISSetUpSF(usedmat);
376: matis = (Mat_IS*)(usedmat->data);
377: ISGetIndices(iscol[i],&idxs);
378: if (istrans[j*nc+i]) {
379: PetscSFBcastBegin(matis->sf,MPIU_INT,idxs,l2gidxs+stl);
380: PetscSFBcastEnd(matis->sf,MPIU_INT,idxs,l2gidxs+stl);
381: } else {
382: PetscSFBcastBegin(matis->csf,MPIU_INT,idxs,l2gidxs+stl);
383: PetscSFBcastEnd(matis->csf,MPIU_INT,idxs,l2gidxs+stl);
384: }
385: ISRestoreIndices(iscol[i],&idxs);
386: stl += lc[i];
387: }
388: ISLocalToGlobalMappingCreate(comm,1,stl,l2gidxs,PETSC_OWN_POINTER,&cl2g);
390: /* Create MATIS */
391: MatCreate(comm,&B);
392: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
393: MatGetBlockSizes(A,&rbs,&cbs);
394: MatSetBlockSizes(B,rbs,cbs);
395: MatSetType(B,MATIS);
396: MatSetLocalToGlobalMapping(B,rl2g,cl2g);
397: ISLocalToGlobalMappingDestroy(&rl2g);
398: ISLocalToGlobalMappingDestroy(&cl2g);
399: MatCreateNest(PETSC_COMM_SELF,nr,islrow,nc,islcol,snest,&lA);
400: for (i=0;i<nr*nc;i++) {
401: if (istrans[i]) {
402: MatDestroy(&snest[i]);
403: }
404: }
405: MatISSetLocalMat(B,lA);
406: MatDestroy(&lA);
407: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
408: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
409: if (reuse == MAT_INPLACE_MATRIX) {
410: MatHeaderReplace(A,&B);
411: } else {
412: *newmat = B;
413: }
414: } else {
415: if (lreuse) {
416: MatISGetLocalMat(*newmat,&lA);
417: for (i=0;i<nr;i++) {
418: for (j=0;j<nc;j++) {
419: if (snest[i*nc+j]) {
420: MatNestSetSubMat(lA,i,j,snest[i*nc+j]);
421: if (istrans[i*nc+j]) {
422: MatDestroy(&snest[i*nc+j]);
423: }
424: }
425: }
426: }
427: } else {
428: PetscInt stl;
429: for (i=0,stl=0;i<nr;i++) {
430: ISCreateStride(PETSC_COMM_SELF,lr[i],stl,1,&islrow[i]);
431: stl += lr[i];
432: }
433: for (i=0,stl=0;i<nc;i++) {
434: ISCreateStride(PETSC_COMM_SELF,lc[i],stl,1,&islcol[i]);
435: stl += lc[i];
436: }
437: MatCreateNest(PETSC_COMM_SELF,nr,islrow,nc,islcol,snest,&lA);
438: for (i=0;i<nr*nc;i++) {
439: if (istrans[i]) {
440: MatDestroy(&snest[i]);
441: }
442: }
443: MatISSetLocalMat(*newmat,lA);
444: MatDestroy(&lA);
445: }
446: MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);
447: MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);
448: }
450: /* If requested, convert the local matrix from MATNEST to AIJ format */
451: convert = PETSC_FALSE;
452: PetscOptionsGetBool(NULL,((PetscObject)A)->prefix,"-matis_convert_local_nest",&convert,NULL);
453: if (convert) {
454: Mat M;
455: MatISLocalFields lf;
456: PetscContainer c;
458: MatISGetLocalMat(*newmat,&lA);
459: MatConvert(lA,MATAIJ,MAT_INITIAL_MATRIX,&M);
460: MatISSetLocalMat(*newmat,M);
461: MatDestroy(&M);
463: /* attach local fields to the matrix */
464: PetscNew(&lf);
465: PetscCalloc2(nr,&lf->rf,nc,&lf->cf);
466: for (i=0;i<nr;i++) {
467: PetscInt n,st;
469: ISGetLocalSize(islrow[i],&n);
470: ISStrideGetInfo(islrow[i],&st,NULL);
471: ISCreateStride(comm,n,st,1,&lf->rf[i]);
472: }
473: for (i=0;i<nc;i++) {
474: PetscInt n,st;
476: ISGetLocalSize(islcol[i],&n);
477: ISStrideGetInfo(islcol[i],&st,NULL);
478: ISCreateStride(comm,n,st,1,&lf->cf[i]);
479: }
480: lf->nr = nr;
481: lf->nc = nc;
482: PetscContainerCreate(PetscObjectComm((PetscObject)(*newmat)),&c);
483: PetscContainerSetPointer(c,lf);
484: PetscContainerSetUserDestroy(c,MatISContainerDestroyFields_Private);
485: PetscObjectCompose((PetscObject)(*newmat),"_convert_nest_lfields",(PetscObject)c);
486: PetscContainerDestroy(&c);
487: }
489: /* Free workspace */
490: for (i=0;i<nr;i++) {
491: ISDestroy(&islrow[i]);
492: }
493: for (i=0;i<nc;i++) {
494: ISDestroy(&islcol[i]);
495: }
496: PetscFree6(isrow,iscol,islrow,islcol,snest,istrans);
497: PetscFree2(lr,lc);
498: return(0);
499: }
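/*
   Usage sketch (not part of matis.c): converting a MATNEST whose (non-NULL) blocks are MATIS
   matrices, as handled by the routine above; ExampleConvertNestToIS, N and B are placeholder
   names. With the option -matis_convert_local_nest the local MATNEST is further converted to
   AIJ and the local field index sets are attached to the result.
*/
static PetscErrorCode ExampleConvertNestToIS(Mat N,Mat *B)
{
  MatConvert(N,MATIS,MAT_INITIAL_MATRIX,B);
  return(0);
}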
501: static PetscErrorCode MatDiagonalScale_IS(Mat A, Vec l, Vec r)
502: {
503: Mat_IS *matis = (Mat_IS*)A->data;
504: Vec ll,rr;
505: const PetscScalar *Y,*X;
506: PetscScalar *x,*y;
507: PetscErrorCode ierr;
510: MatISSetUpSF(A);
511: if (l) {
512: ll = matis->y;
513: VecGetArrayRead(l,&Y);
514: VecGetArray(ll,&y);
515: PetscSFBcastBegin(matis->sf,MPIU_SCALAR,Y,y);
516: } else {
517: ll = NULL;
518: }
519: if (r) {
520: rr = matis->x;
521: VecGetArrayRead(r,&X);
522: VecGetArray(rr,&x);
523: PetscSFBcastBegin(matis->csf,MPIU_SCALAR,X,x);
524: } else {
525: rr = NULL;
526: }
527: if (ll) {
528: PetscSFBcastEnd(matis->sf,MPIU_SCALAR,Y,y);
529: VecRestoreArrayRead(l,&Y);
530: VecRestoreArray(ll,&y);
531: }
532: if (rr) {
533: PetscSFBcastEnd(matis->csf,MPIU_SCALAR,X,x);
534: VecRestoreArrayRead(r,&X);
535: VecRestoreArray(rr,&x);
536: }
537: MatDiagonalScale(matis->A,ll,rr);
538: return(0);
539: }
541: static PetscErrorCode MatGetInfo_IS(Mat A,MatInfoType flag,MatInfo *ginfo)
542: {
543: Mat_IS *matis = (Mat_IS*)A->data;
544: MatInfo info;
545: PetscReal isend[6],irecv[6];
546: PetscInt bs;
550: MatGetBlockSize(A,&bs);
551: if (matis->A->ops->getinfo) {
552: MatGetInfo(matis->A,MAT_LOCAL,&info);
553: isend[0] = info.nz_used;
554: isend[1] = info.nz_allocated;
555: isend[2] = info.nz_unneeded;
556: isend[3] = info.memory;
557: isend[4] = info.mallocs;
558: } else {
559: isend[0] = 0.;
560: isend[1] = 0.;
561: isend[2] = 0.;
562: isend[3] = 0.;
563: isend[4] = 0.;
564: }
565: isend[5] = matis->A->num_ass;
566: if (flag == MAT_LOCAL) {
567: ginfo->nz_used = isend[0];
568: ginfo->nz_allocated = isend[1];
569: ginfo->nz_unneeded = isend[2];
570: ginfo->memory = isend[3];
571: ginfo->mallocs = isend[4];
572: ginfo->assemblies = isend[5];
573: } else if (flag == MAT_GLOBAL_MAX) {
574: MPIU_Allreduce(isend,irecv,6,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));
576: ginfo->nz_used = irecv[0];
577: ginfo->nz_allocated = irecv[1];
578: ginfo->nz_unneeded = irecv[2];
579: ginfo->memory = irecv[3];
580: ginfo->mallocs = irecv[4];
581: ginfo->assemblies = irecv[5];
582: } else if (flag == MAT_GLOBAL_SUM) {
583: MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));
585: ginfo->nz_used = irecv[0];
586: ginfo->nz_allocated = irecv[1];
587: ginfo->nz_unneeded = irecv[2];
588: ginfo->memory = irecv[3];
589: ginfo->mallocs = irecv[4];
590: ginfo->assemblies = A->num_ass;
591: }
592: ginfo->block_size = bs;
593: ginfo->fill_ratio_given = 0;
594: ginfo->fill_ratio_needed = 0;
595: ginfo->factor_mallocs = 0;
596: return(0);
597: }
599: PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B)
600: {
601: Mat C,lC,lA;
602: PetscErrorCode ierr;
605: if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
606: ISLocalToGlobalMapping rl2g,cl2g;
607: MatCreate(PetscObjectComm((PetscObject)A),&C);
608: MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);
609: MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));
610: MatSetType(C,MATIS);
611: MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);
612: MatSetLocalToGlobalMapping(C,cl2g,rl2g);
613: } else {
614: C = *B;
615: }
617: /* perform local transposition */
618: MatISGetLocalMat(A,&lA);
619: MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);
620: MatISSetLocalMat(C,lC);
621: MatDestroy(&lC);
623: if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
624: *B = C;
625: } else {
626: MatHeaderMerge(A,&C);
627: }
628: MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
629: MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
630: return(0);
631: }
633: PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
634: {
635: Mat_IS *is = (Mat_IS*)A->data;
639: if (D) { /* MatShift_IS passes D = NULL */
640: VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);
641: VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);
642: }
643: VecPointwiseDivide(is->y,is->y,is->counter);
644: MatDiagonalSet(is->A,is->y,insmode);
645: return(0);
646: }
648: PetscErrorCode MatShift_IS(Mat A,PetscScalar a)
649: {
650: Mat_IS *is = (Mat_IS*)A->data;
654: VecSet(is->y,a);
655: MatDiagonalSet_IS(A,NULL,ADD_VALUES);
656: return(0);
657: }
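/*
   Why MatDiagonalSet_IS() divides by the counter vector: an interface dof shared by k
   subdomains receives a diagonal contribution from each of the k local matrices, and the
   implicit assembly sums them. Dividing by the multiplicity stored in is->counter makes the
   summed result equal to the requested one; e.g. (illustrative numbers) shifting by a = 2 a
   dof shared by k = 2 subdomains adds 2/2 = 1 to each local diagonal entry, so the global
   diagonal still grows by exactly 2.
*/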
659: static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
660: {
662: PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
665: #if defined(PETSC_USE_DEBUG)
666: if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
667: #endif
668: ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);
669: ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);
670: MatSetValuesLocal_IS(A,m,rows_l,n,cols_l,values,addv);
671: return(0);
672: }
674: static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
675: {
677: PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
680: #if defined(PETSC_USE_DEBUG)
681: if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
682: #endif
683: ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);
684: ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);
685: MatSetValuesBlockedLocal_IS(A,m,rows_l,n,cols_l,values,addv);
686: return(0);
687: }
689: static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
690: {
691: PetscInt *owners = map->range;
692: PetscInt n = map->n;
693: PetscSF sf;
694: PetscInt *lidxs,*work = NULL;
695: PetscSFNode *ridxs;
696: PetscMPIInt rank;
697: PetscInt r, p = 0, len = 0;
701: if (on) *on = 0; /* squelch -Wmaybe-uninitialized */
702: /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
703: MPI_Comm_rank(map->comm,&rank);
704: PetscMalloc1(n,&lidxs);
705: for (r = 0; r < n; ++r) lidxs[r] = -1;
706: PetscMalloc1(N,&ridxs);
707: for (r = 0; r < N; ++r) {
708: const PetscInt idx = idxs[r];
709: if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
710: if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
711: PetscLayoutFindOwner(map,idx,&p);
712: }
713: ridxs[r].rank = p;
714: ridxs[r].index = idxs[r] - owners[p];
715: }
716: PetscSFCreate(map->comm,&sf);
717: PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);
718: PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
719: PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
720: if (ogidxs) { /* communicate global idxs */
721: PetscInt cum = 0,start,*work2;
723: PetscMalloc1(n,&work);
724: PetscCalloc1(N,&work2);
725: for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
726: MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);
727: start -= cum;
728: cum = 0;
729: for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
730: PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);
731: PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);
732: PetscFree(work2);
733: }
734: PetscSFDestroy(&sf);
735: /* Compress: keep only the locally owned indices that were actually referenced */
736: for (r = 0; r < n; ++r)
737: if (lidxs[r] >= 0) {
738: if (work) work[len] = work[r];
739: lidxs[len++] = r;
740: }
741: if (on) *on = len;
742: if (oidxs) *oidxs = lidxs;
743: if (ogidxs) *ogidxs = work;
744: return(0);
745: }
747: static PetscErrorCode MatCreateSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
748: {
749: Mat locmat,newlocmat;
750: Mat_IS *newmatis;
751: #if defined(PETSC_USE_DEBUG)
752: Vec rtest,ltest;
753: const PetscScalar *array;
754: #endif
755: const PetscInt *idxs;
756: PetscInt i,m,n;
757: PetscErrorCode ierr;
760: if (scall == MAT_REUSE_MATRIX) {
761: PetscBool ismatis;
763: PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);
764: if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
765: newmatis = (Mat_IS*)(*newmat)->data;
766: if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
767: if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
768: }
769: /* irow and icol may not have duplicate entries */
770: #if defined(PETSC_USE_DEBUG)
771: MatCreateVecs(mat,&ltest,&rtest);
772: ISGetLocalSize(irow,&n);
773: ISGetIndices(irow,&idxs);
774: for (i=0;i<n;i++) {
775: VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);
776: }
777: VecAssemblyBegin(rtest);
778: VecAssemblyEnd(rtest);
779: VecGetLocalSize(rtest,&n);
780: VecGetOwnershipRange(rtest,&m,NULL);
781: VecGetArrayRead(rtest,&array);
782: for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
783: VecRestoreArrayRead(rtest,&array);
784: ISRestoreIndices(irow,&idxs);
785: ISGetLocalSize(icol,&n);
786: ISGetIndices(icol,&idxs);
787: for (i=0;i<n;i++) {
788: VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);
789: }
790: VecAssemblyBegin(ltest);
791: VecAssemblyEnd(ltest);
792: VecGetLocalSize(ltest,&n);
793: VecGetOwnershipRange(ltest,&m,NULL);
794: VecGetArrayRead(ltest,&array);
795: for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
796: VecRestoreArrayRead(ltest,&array);
797: ISRestoreIndices(icol,&idxs);
798: VecDestroy(&rtest);
799: VecDestroy(&ltest);
800: #endif
801: if (scall == MAT_INITIAL_MATRIX) {
802: Mat_IS *matis = (Mat_IS*)mat->data;
803: ISLocalToGlobalMapping rl2g;
804: IS is;
805: PetscInt *lidxs,*lgidxs,*newgidxs;
806: PetscInt ll,newloc,irbs,icbs,arbs,acbs,rbs,cbs;
807: MPI_Comm comm;
809: PetscObjectGetComm((PetscObject)mat,&comm);
810: MatGetBlockSizes(mat,&arbs,&acbs);
811: ISGetBlockSize(irow,&irbs);
812: ISGetBlockSize(icol,&icbs);
813: rbs = arbs == irbs ? irbs : 1;
814: cbs = acbs == icbs ? icbs : 1;
815: ISGetLocalSize(irow,&m);
816: ISGetLocalSize(icol,&n);
817: MatCreate(comm,newmat);
818: MatSetType(*newmat,MATIS);
819: MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);
820: MatSetBlockSizes(*newmat,rbs,cbs);
821: /* communicate irow to their owners in the layout */
822: ISGetIndices(irow,&idxs);
823: PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);
824: ISRestoreIndices(irow,&idxs);
825: MatISSetUpSF(mat);
826: PetscMemzero(matis->sf_rootdata,matis->sf->nroots*sizeof(PetscInt));
827: for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
828: PetscFree(lidxs);
829: PetscFree(lgidxs);
830: PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
831: PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
832: for (i=0,newloc=0;i<matis->sf->nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
833: PetscMalloc1(newloc,&newgidxs);
834: PetscMalloc1(newloc,&lidxs);
835: for (i=0,newloc=0;i<matis->sf->nleaves;i++)
836: if (matis->sf_leafdata[i]) {
837: lidxs[newloc] = i;
838: newgidxs[newloc++] = matis->sf_leafdata[i]-1;
839: }
840: ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);
841: ISLocalToGlobalMappingCreateIS(is,&rl2g);
842: ISLocalToGlobalMappingSetBlockSize(rl2g,rbs);
843: ISDestroy(&is);
844: /* local is to extract local submatrix */
845: newmatis = (Mat_IS*)(*newmat)->data;
846: ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);
847: if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
848: PetscBool cong;
850: PetscLayoutCompare(mat->rmap,mat->cmap,&cong);
851: if (cong) mat->congruentlayouts = 1;
852: else mat->congruentlayouts = 0;
853: }
854: if (mat->congruentlayouts && irow == icol && matis->csf == matis->sf) {
855: MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);
856: PetscObjectReference((PetscObject)newmatis->getsub_ris);
857: newmatis->getsub_cis = newmatis->getsub_ris;
858: } else {
859: ISLocalToGlobalMapping cl2g;
861: /* communicate icol to their owners in the layout */
862: ISGetIndices(icol,&idxs);
863: PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);
864: ISRestoreIndices(icol,&idxs);
865: PetscMemzero(matis->csf_rootdata,matis->csf->nroots*sizeof(PetscInt));
866: for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
867: PetscFree(lidxs);
868: PetscFree(lgidxs);
869: PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);
870: PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);
871: for (i=0,newloc=0;i<matis->csf->nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
872: PetscMalloc1(newloc,&newgidxs);
873: PetscMalloc1(newloc,&lidxs);
874: for (i=0,newloc=0;i<matis->csf->nleaves;i++)
875: if (matis->csf_leafdata[i]) {
876: lidxs[newloc] = i;
877: newgidxs[newloc++] = matis->csf_leafdata[i]-1;
878: }
879: ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);
880: ISLocalToGlobalMappingCreateIS(is,&cl2g);
881: ISLocalToGlobalMappingSetBlockSize(cl2g,cbs);
882: ISDestroy(&is);
883: /* local is to extract local submatrix */
884: ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);
885: MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);
886: ISLocalToGlobalMappingDestroy(&cl2g);
887: }
888: ISLocalToGlobalMappingDestroy(&rl2g);
889: } else {
890: MatISGetLocalMat(*newmat,&newlocmat);
891: }
892: MatISGetLocalMat(mat,&locmat);
893: newmatis = (Mat_IS*)(*newmat)->data;
894: MatCreateSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);
895: if (scall == MAT_INITIAL_MATRIX) {
896: MatISSetLocalMat(*newmat,newlocmat);
897: MatDestroy(&newlocmat);
898: }
899: MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);
900: MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);
901: return(0);
902: }
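/*
   Usage sketch (not part of matis.c): extracting a parallel submatrix from a MATIS with the
   routine above; ExampleSubMatrixIS, A, irow, icol and S are placeholder names. irow and icol
   must not contain duplicate entries (checked in debug builds).
*/
static PetscErrorCode ExampleSubMatrixIS(Mat A,IS irow,IS icol,Mat *S)
{
  MatCreateSubMatrix(A,irow,icol,MAT_INITIAL_MATRIX,S); /* S is again of type MATIS */
  /* with the same index sets, later extractions can reuse the storage of S */
  MatCreateSubMatrix(A,irow,icol,MAT_REUSE_MATRIX,S);
  return(0);
}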
904: static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
905: {
906: Mat_IS *a = (Mat_IS*)A->data,*b;
907: PetscBool ismatis;
911: PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);
912: if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Not yet implemented");
913: b = (Mat_IS*)B->data;
914: MatCopy(a->A,b->A,str);
915: PetscObjectStateIncrease((PetscObject)B);
916: return(0);
917: }
919: static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d)
920: {
921: Vec v;
922: const PetscScalar *array;
923: PetscInt i,n;
924: PetscErrorCode ierr;
927: *missing = PETSC_FALSE;
928: MatCreateVecs(A,NULL,&v);
929: MatGetDiagonal(A,v);
930: VecGetLocalSize(v,&n);
931: VecGetArrayRead(v,&array);
932: for (i=0;i<n;i++) if (array[i] == 0.) break;
933: VecRestoreArrayRead(v,&array);
934: VecDestroy(&v);
935: if (i != n) *missing = PETSC_TRUE;
936: if (d) {
937: *d = -1;
938: if (*missing) {
939: PetscInt rstart;
940: MatGetOwnershipRange(A,&rstart,NULL);
941: *d = i+rstart;
942: }
943: }
944: return(0);
945: }
947: static PetscErrorCode MatISSetUpSF_IS(Mat B)
948: {
949: Mat_IS *matis = (Mat_IS*)(B->data);
950: const PetscInt *gidxs;
951: PetscInt nleaves;
955: if (matis->sf) return(0);
956: PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);
957: ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);
958: ISLocalToGlobalMappingGetSize(B->rmap->mapping,&nleaves);
959: PetscSFSetGraphLayout(matis->sf,B->rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
960: ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);
961: PetscMalloc2(matis->sf->nroots,&matis->sf_rootdata,matis->sf->nleaves,&matis->sf_leafdata);
962: if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
963: ISLocalToGlobalMappingGetSize(B->cmap->mapping,&nleaves);
964: PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);
965: ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);
966: PetscSFSetGraphLayout(matis->csf,B->cmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
967: ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);
968: PetscMalloc2(matis->csf->nroots,&matis->csf_rootdata,matis->csf->nleaves,&matis->csf_leafdata);
969: } else {
970: matis->csf = matis->sf;
971: matis->csf_leafdata = matis->sf_leafdata;
972: matis->csf_rootdata = matis->sf_rootdata;
973: }
974: return(0);
975: }
977: /*@
978: MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
980: Collective on MPI_Comm
982: Input Parameters:
983: + B - the matrix
984: . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
985: (same value is used for all local rows)
986: . d_nnz - array containing the number of nonzeros in the various rows of the
987: DIAGONAL portion of the local submatrix (possibly different for each row)
988: or NULL, if d_nz is used to specify the nonzero structure.
989: The size of this array is equal to the number of local rows, i.e., 'm'.
990: For matrices that will be factored, you must leave room for (and set)
991: the diagonal entry even if it is zero.
992: . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
993: submatrix (same value is used for all local rows).
994: - o_nnz - array containing the number of nonzeros in the various rows of the
995: OFF-DIAGONAL portion of the local submatrix (possibly different for
996: each row) or NULL, if o_nz is used to specify the nonzero
997: structure. The size of this array is equal to the number
998: of local rows, i.e., 'm'.
1000: If the *_nnz parameter is given then the *_nz parameter is ignored
1002: Level: intermediate
1004: Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
1005: from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
1006: matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
1008: .keywords: matrix
1010: .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
1011: @*/
1012: PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
1013: {
1019: PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));
1020: return(0);
1021: }
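/*
   Usage sketch (not part of matis.c), with assumed numbers: preallocate as if the matrix were
   an assembled MPIAIJ, e.g. at most 5 nonzeros per row in the diagonal block and 2 in the
   off-diagonal block; the preallocation of the sequential local matrix is then derived (and
   overestimated) from these values. ExamplePreallocateIS and B are placeholder names.
*/
static PetscErrorCode ExamplePreallocateIS(Mat B)
{
  MatISSetPreallocation(B,5,NULL,2,NULL);
  return(0);
}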
1023: static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
1024: {
1025: Mat_IS *matis = (Mat_IS*)(B->data);
1026: PetscInt bs,i,nlocalcols;
1030: if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
1031: MatISSetUpSF(B);
1033: if (!d_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nz;
1034: else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
1036: if (!o_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nz;
1037: else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
1039: PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
1040: MatGetSize(matis->A,NULL,&nlocalcols);
1041: MatGetBlockSize(matis->A,&bs);
1042: PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
1044: for (i=0;i<matis->sf->nleaves;i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
1045: MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);
1046: #if defined(PETSC_HAVE_HYPRE)
1047: MatHYPRESetPreallocation(matis->A,0,matis->sf_leafdata,0,NULL);
1048: #endif
1050: for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
1051: MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
1053: for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
1054: MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
1056: /* for other matrix types */
1057: MatSetUp(matis->A);
1058: return(0);
1059: }
1061: PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
1062: {
1063: Mat_IS *matis = (Mat_IS*)(A->data);
1064: PetscInt *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
1065: const PetscInt *global_indices_r,*global_indices_c;
1066: PetscInt i,j,bs,rows,cols;
1067: PetscInt lrows,lcols;
1068: PetscInt local_rows,local_cols;
1069: PetscMPIInt nsubdomains;
1070: PetscBool isdense,issbaij;
1071: PetscErrorCode ierr;
1074: MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);
1075: MatGetSize(A,&rows,&cols);
1076: MatGetBlockSize(A,&bs);
1077: MatGetSize(matis->A,&local_rows,&local_cols);
1078: PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
1079: PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
1080: ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);
1081: if (A->rmap->mapping != A->cmap->mapping) {
1082: ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);
1083: } else {
1084: global_indices_c = global_indices_r;
1085: }
1087: if (issbaij) {
1088: MatGetRowUpperTriangular(matis->A);
1089: }
1090: /*
1091: An SF reduce is needed to sum up properly on shared rows.
1092: Note that generally preallocation is not exact, since it overestimates nonzeros
1093: */
1094: MatISSetUpSF(A);
1095: MatGetLocalSize(A,&lrows,&lcols);
1096: MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);
1097: /* All processes need to compute entire row ownership */
1098: PetscMalloc1(rows,&row_ownership);
1099: MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);
1100: for (i=0;i<nsubdomains;i++) {
1101: for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
1102: row_ownership[j] = i;
1103: }
1104: }
1105: MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);
1107: /*
1108: my_dnz and my_onz contain the exact contribution to the preallocation from each local mat;
1109: they are then summed up properly, so the preallocation is always sufficient (a worked example follows this function)
1110: */
1111: PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);
1112: /* preallocation as a MATAIJ */
1113: if (isdense) { /* special case for dense local matrices */
1114: for (i=0;i<local_rows;i++) {
1115: PetscInt owner = row_ownership[global_indices_r[i]];
1116: for (j=0;j<local_cols;j++) {
1117: PetscInt index_col = global_indices_c[j];
1118: if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
1119: my_dnz[i] += 1;
1120: } else { /* offdiag block */
1121: my_onz[i] += 1;
1122: }
1123: }
1124: }
1125: } else if (matis->A->ops->getrowij) {
1126: const PetscInt *ii,*jj,*jptr;
1127: PetscBool done;
1128: MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);
1129: if (!done) SETERRQ(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ");
1130: jptr = jj;
1131: for (i=0;i<local_rows;i++) {
1132: PetscInt index_row = global_indices_r[i];
1133: for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
1134: PetscInt owner = row_ownership[index_row];
1135: PetscInt index_col = global_indices_c[*jptr];
1136: if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
1137: my_dnz[i] += 1;
1138: } else { /* offdiag block */
1139: my_onz[i] += 1;
1140: }
1141: /* same as before, interchanging rows and cols */
1142: if (issbaij && index_col != index_row) {
1143: owner = row_ownership[index_col];
1144: if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
1145: my_dnz[*jptr] += 1;
1146: } else {
1147: my_onz[*jptr] += 1;
1148: }
1149: }
1150: }
1151: }
1152: MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);
1153: if (!done) SETERRQ(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ");
1154: } else { /* loop over rows and use MatGetRow */
1155: for (i=0;i<local_rows;i++) {
1156: const PetscInt *cols;
1157: PetscInt ncols,index_row = global_indices_r[i];
1158: MatGetRow(matis->A,i,&ncols,&cols,NULL);
1159: for (j=0;j<ncols;j++) {
1160: PetscInt owner = row_ownership[index_row];
1161: PetscInt index_col = global_indices_c[cols[j]];
1162: if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
1163: my_dnz[i] += 1;
1164: } else { /* offdiag block */
1165: my_onz[i] += 1;
1166: }
1167: /* same as before, interchanging rows and cols */
1168: if (issbaij && index_col != index_row) {
1169: owner = row_ownership[index_col];
1170: if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
1171: my_dnz[cols[j]] += 1;
1172: } else {
1173: my_onz[cols[j]] += 1;
1174: }
1175: }
1176: }
1177: MatRestoreRow(matis->A,i,&ncols,&cols,NULL);
1178: }
1179: }
1180: if (global_indices_c != global_indices_r) {
1181: ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);
1182: }
1183: ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);
1184: PetscFree(row_ownership);
1186: /* Reduce my_dnz and my_onz */
1187: if (maxreduce) {
1188: PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
1189: PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
1190: PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
1191: PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
1192: } else {
1193: PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
1194: PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
1195: PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
1196: PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
1197: }
1198: PetscFree2(my_dnz,my_onz);
1200: /* Resize preallocation if overestimated */
1201: for (i=0;i<lrows;i++) {
1202: dnz[i] = PetscMin(dnz[i],lcols);
1203: onz[i] = PetscMin(onz[i],cols-lcols);
1204: }
1206: /* Set preallocation */
1207: MatSeqAIJSetPreallocation(B,0,dnz);
1208: MatMPIAIJSetPreallocation(B,0,dnz,0,onz);
1209: for (i=0;i<lrows/bs;i++) {
1210: dnz[i] = dnz[i*bs]/bs;
1211: onz[i] = onz[i*bs]/bs;
1212: }
1213: MatSeqBAIJSetPreallocation(B,bs,0,dnz);
1214: MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);
1215: MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);
1216: MatPreallocateFinalize(dnz,onz);
1217: if (issbaij) {
1218: MatRestoreRowUpperTriangular(matis->A);
1219: }
1220: return(0);
1221: }
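/*
   Worked example of the reduction above (illustrative numbers): suppose a global row is shared
   by two subdomains and each local matrix contributes 3 nonzeros to its diagonal block. With
   maxreduce = PETSC_FALSE the MPI_SUM reduce yields dnz = 6, a safe overestimate (some of the
   columns may coincide); with maxreduce = PETSC_TRUE the MPI_MAX reduce yields dnz = 3, which
   can undercount when the two subdomains touch different columns. The final loop clips both
   counts to the number of diagonal/off-diagonal columns available.
*/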
1223: static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
1224: {
1225: Mat_IS *matis = (Mat_IS*)(mat->data);
1226: Mat local_mat;
1227: /* info on mat */
1228: PetscInt bs,rows,cols,lrows,lcols;
1229: PetscInt local_rows,local_cols;
1230: PetscBool isseqdense,isseqsbaij,isseqaij,isseqbaij;
1231: #if defined (PETSC_USE_DEBUG)
1232: PetscBool lb[4],bb[4];
1233: #endif
1234: PetscMPIInt nsubdomains;
1235: /* values insertion */
1236: PetscScalar *array;
1237: /* work */
1241: /* get info from mat */
1242: MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);
1243: if (nsubdomains == 1) {
1244: Mat B;
1245: IS rows,cols;
1246: IS irows,icols;
1247: const PetscInt *ridxs,*cidxs;
1249: ISLocalToGlobalMappingGetIndices(mat->rmap->mapping,&ridxs);
1250: ISLocalToGlobalMappingGetIndices(mat->cmap->mapping,&cidxs);
1251: ISCreateGeneral(PETSC_COMM_SELF,mat->rmap->n,ridxs,PETSC_USE_POINTER,&rows);
1252: ISCreateGeneral(PETSC_COMM_SELF,mat->cmap->n,cidxs,PETSC_USE_POINTER,&cols);
1253: ISSetPermutation(rows);
1254: ISSetPermutation(cols);
1255: ISInvertPermutation(rows,mat->rmap->n,&irows);
1256: ISInvertPermutation(cols,mat->cmap->n,&icols);
1257: ISLocalToGlobalMappingRestoreIndices(mat->rmap->mapping,&ridxs);
1258: ISLocalToGlobalMappingRestoreIndices(mat->cmap->mapping,&cidxs);
1259: ISDestroy(&cols);
1260: ISDestroy(&rows);
1261: MatConvert(matis->A,MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1262: MatCreateSubMatrix(B,irows,icols,reuse,M);
1263: MatDestroy(&B);
1264: ISDestroy(&icols);
1265: ISDestroy(&irows);
1266: return(0);
1267: }
1268: MatGetSize(mat,&rows,&cols);
1269: MatGetBlockSize(mat,&bs);
1270: MatGetLocalSize(mat,&lrows,&lcols);
1271: MatGetSize(matis->A,&local_rows,&local_cols);
1272: PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isseqdense);
1273: PetscObjectBaseTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);
1274: PetscObjectTypeCompare((PetscObject)matis->A,MATSEQBAIJ,&isseqbaij);
1275: PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&isseqsbaij);
1276: if (!isseqdense && !isseqaij && !isseqbaij && !isseqsbaij) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not for matrix type %s",((PetscObject)(matis->A))->type_name);
1277: #if defined (PETSC_USE_DEBUG)
1278: lb[0] = isseqdense;
1279: lb[1] = isseqaij;
1280: lb[2] = isseqbaij;
1281: lb[3] = isseqsbaij;
1282: MPIU_Allreduce(lb,bb,4,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));
1283: if (!bb[0] && !bb[1] && !bb[2] && !bb[3]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Local matrices must have the same type");
1284: #endif
1286: if (reuse == MAT_INITIAL_MATRIX) {
1287: MatCreate(PetscObjectComm((PetscObject)mat),M);
1288: MatSetSizes(*M,lrows,lcols,rows,cols);
1289: if (!isseqsbaij) {
1290: MatSetType(*M,MATAIJ);
1291: } else {
1292: MatSetType(*M,MATSBAIJ);
1293: }
1294: MatSetBlockSize(*M,bs);
1295: MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);
1296: } else {
1297: PetscInt mbs,mrows,mcols,mlrows,mlcols;
1298: /* some checks */
1299: MatGetBlockSize(*M,&mbs);
1300: MatGetSize(*M,&mrows,&mcols);
1301: MatGetLocalSize(*M,&mlrows,&mlcols);
1302: if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
1303: if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
1304: if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
1305: if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
1306: if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
1307: MatZeroEntries(*M);
1308: }
1310: if (isseqsbaij) {
1311: MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);
1312: } else {
1313: PetscObjectReference((PetscObject)matis->A);
1314: local_mat = matis->A;
1315: }
1317: /* Set values */
1318: MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);
1319: if (isseqdense) { /* special case for dense local matrices */
1320: PetscInt i,*dummy;
1322: PetscMalloc1(PetscMax(local_rows,local_cols),&dummy);
1323: for (i=0;i<PetscMax(local_rows,local_cols);i++) dummy[i] = i;
1324: MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);
1325: MatDenseGetArray(local_mat,&array);
1326: MatSetValuesLocal(*M,local_rows,dummy,local_cols,dummy,array,ADD_VALUES);
1327: MatDenseRestoreArray(local_mat,&array);
1328: PetscFree(dummy);
1329: } else if (isseqaij) {
1330: PetscInt i,nvtxs,*xadj,*adjncy;
1331: PetscBool done;
1333: MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
1334: if (!done) SETERRQ(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ");
1335: MatSeqAIJGetArray(local_mat,&array);
1336: for (i=0;i<nvtxs;i++) {
1337: MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);
1338: }
1339: MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
1340: if (!done) SETERRQ(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ");
1341: MatSeqAIJRestoreArray(local_mat,&array);
1342: } else { /* very basic values insertion for all other matrix types */
1343: PetscInt i;
1345: for (i=0;i<local_rows;i++) {
1346: PetscInt j;
1347: const PetscInt *local_indices_cols;
1349: MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);
1350: MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);
1351: MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);
1352: }
1353: }
1354: MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);
1355: MatDestroy(&local_mat);
1356: MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);
1357: if (isseqdense) {
1358: MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);
1359: }
1360: return(0);
1361: }
1363: /*@
1364: MatISGetMPIXAIJ - Converts a MATIS matrix into a parallel AIJ format
1366: Input Parameters:
1367: + mat - the matrix (should be of type MATIS)
1368: - reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
1370: Output Parameter:
1371: . newmat - the matrix in AIJ format
1373: Level: developer
1375: Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
1377: .seealso: MATIS
1378: @*/
1379: PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
1380: {
1387: if (reuse != MAT_INITIAL_MATRIX) {
1390: if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
1391: }
1392: PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));
1393: return(0);
1394: }
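/*
   Usage sketch (not part of matis.c): assembling a MATIS into a standard parallel matrix, e.g.
   for preconditioners that need the explicitly assembled operator; ExampleAssembleToAIJ, A and
   B are placeholder names.
*/
static PetscErrorCode ExampleAssembleToAIJ(Mat A,Mat *B)
{
  MatISGetMPIXAIJ(A,MAT_INITIAL_MATRIX,B); /* B is MATAIJ (MATSBAIJ if the local matrices are SBAIJ) */
  /* after changing the values of A, the assembled copy can be refreshed in place */
  MatISGetMPIXAIJ(A,MAT_REUSE_MATRIX,B);
  return(0);
}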
1396: PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
1397: {
1399: Mat_IS *matis = (Mat_IS*)(mat->data);
1400: PetscInt bs,m,n,M,N;
1401: Mat B,localmat;
1404: MatGetBlockSize(mat,&bs);
1405: MatGetSize(mat,&M,&N);
1406: MatGetLocalSize(mat,&m,&n);
1407: MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);
1408: MatDuplicate(matis->A,op,&localmat);
1409: MatISSetLocalMat(B,localmat);
1410: MatDestroy(&localmat);
1411: if (matis->sf) {
1412: Mat_IS *bmatis = (Mat_IS*)(B->data);
1414: PetscObjectReference((PetscObject)matis->sf);
1415: bmatis->sf = matis->sf;
1416: PetscMalloc2(matis->sf->nroots,&bmatis->sf_rootdata,matis->sf->nleaves,&bmatis->sf_leafdata);
1417: if (matis->sf != matis->csf) {
1418: PetscObjectReference((PetscObject)matis->csf);
1419: bmatis->csf = matis->csf;
1420: PetscMalloc2(matis->csf->nroots,&bmatis->csf_rootdata,matis->csf->nleaves,&bmatis->csf_leafdata);
1421: } else {
1422: bmatis->csf = bmatis->sf;
1423: bmatis->csf_leafdata = bmatis->sf_leafdata;
1424: bmatis->csf_rootdata = bmatis->sf_rootdata;
1425: }
1426: }
1427: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
1428: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
1429: *newmat = B;
1430: return(0);
1431: }
1433: static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
1434: {
1436: Mat_IS *matis = (Mat_IS*)A->data;
1437: PetscBool local_sym;
1440: MatIsHermitian(matis->A,tol,&local_sym);
1441: MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
1442: return(0);
1443: }
1445: static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
1446: {
1448: Mat_IS *matis = (Mat_IS*)A->data;
1449: PetscBool local_sym;
1452: MatIsSymmetric(matis->A,tol,&local_sym);
1453: MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
1454: return(0);
1455: }
1457: static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A,PetscBool *flg)
1458: {
1460: Mat_IS *matis = (Mat_IS*)A->data;
1461: PetscBool local_sym;
1464: if (A->rmap->mapping != A->cmap->mapping) {
1465: *flg = PETSC_FALSE;
1466: return(0);
1467: }
1468: MatIsStructurallySymmetric(matis->A,&local_sym);
1469: MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
1470: return(0);
1471: }
1473: static PetscErrorCode MatDestroy_IS(Mat A)
1474: {
1476: Mat_IS *b = (Mat_IS*)A->data;
1479: MatDestroy(&b->A);
1480: VecScatterDestroy(&b->cctx);
1481: VecScatterDestroy(&b->rctx);
1482: VecDestroy(&b->x);
1483: VecDestroy(&b->y);
1484: VecDestroy(&b->counter);
1485: ISDestroy(&b->getsub_ris);
1486: ISDestroy(&b->getsub_cis);
1487: if (b->sf != b->csf) {
1488: PetscSFDestroy(&b->csf);
1489: PetscFree2(b->csf_rootdata,b->csf_leafdata);
1490: }
1491: PetscSFDestroy(&b->sf);
1492: PetscFree2(b->sf_rootdata,b->sf_leafdata);
1493: PetscFree(A->data);
1494: PetscObjectChangeTypeName((PetscObject)A,0);
1495: PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);
1496: PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);
1497: PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);
1498: PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);
1499: PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);
1500: return(0);
1501: }
1503: static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
1504: {
1506: Mat_IS *is = (Mat_IS*)A->data;
1507: PetscScalar zero = 0.0;
1510: /* scatter the global vector x into the local work vector */
1511: VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
1512: VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
1514: /* multiply the local matrix */
1515: MatMult(is->A,is->x,is->y);
1517: /* scatter product back into global memory */
1518: VecSet(y,zero);
1519: VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
1520: VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
1521: return(0);
1522: }
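/*
   In formulas (notation introduced here only for illustration): with C_i and R_i the column and
   row restriction operators realized by cctx and rctx, and A_i the local matrix of subdomain i,
   the routine above applies the implicitly assembled operator

       y = sum_i R_i^T A_i C_i x

   without ever forming the global matrix.
*/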
1524: static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1525: {
1526: Vec temp_vec;
1530: if (v3 != v2) {
1531: MatMult(A,v1,v3);
1532: VecAXPY(v3,1.0,v2);
1533: } else {
1534: VecDuplicate(v2,&temp_vec);
1535: MatMult(A,v1,temp_vec);
1536: VecAXPY(temp_vec,1.0,v2);
1537: VecCopy(temp_vec,v3);
1538: VecDestroy(&temp_vec);
1539: }
1540: return(0);
1541: }
1543: static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
1544: {
1545: Mat_IS *is = (Mat_IS*)A->data;
1549: /* scatter the global vector y into the local work vector */
1550: VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);
1551: VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);
1553: /* multiply the local matrix */
1554: MatMultTranspose(is->A,is->y,is->x);
1556: /* scatter product back into global vector */
1557: VecSet(x,0);
1558: VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);
1559: VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);
1560: return(0);
1561: }
1563: static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1564: {
1565: Vec temp_vec;
1569: if (v3 != v2) {
1570: MatMultTranspose(A,v1,v3);
1571: VecAXPY(v3,1.0,v2);
1572: } else {
1573: VecDuplicate(v2,&temp_vec);
1574: MatMultTranspose(A,v1,temp_vec);
1575: VecAXPY(temp_vec,1.0,v2);
1576: VecCopy(temp_vec,v3);
1577: VecDestroy(&temp_vec);
1578: }
1579: return(0);
1580: }
1582: static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
1583: {
1584: Mat_IS *a = (Mat_IS*)A->data;
1586: PetscViewer sviewer;
1587: PetscBool isascii,view = PETSC_TRUE;
1590: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
1591: if (isascii) {
1592: PetscViewerFormat format;
1594: PetscViewerGetFormat(viewer,&format);
1595: if (format == PETSC_VIEWER_ASCII_INFO) view = PETSC_FALSE;
1596: }
1597: if (!view) return(0);
1598: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
1599: MatView(a->A,sviewer);
1600: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
1601: PetscViewerFlush(viewer);
1602: return(0);
1603: }
1605: static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
1606: {
1608: PetscInt nr,rbs,nc,cbs;
1609: Mat_IS *is = (Mat_IS*)A->data;
1610: IS from,to;
1611: Vec cglobal,rglobal;
1616: /* Destroy any previous data */
1617: VecDestroy(&is->x);
1618: VecDestroy(&is->y);
1619: VecDestroy(&is->counter);
1620: VecScatterDestroy(&is->rctx);
1621: VecScatterDestroy(&is->cctx);
1622: MatDestroy(&is->A);
1623: if (is->csf != is->sf) {
1624: PetscSFDestroy(&is->csf);
1625: PetscFree2(is->csf_rootdata,is->csf_leafdata);
1626: }
1627: PetscSFDestroy(&is->sf);
1628: PetscFree2(is->sf_rootdata,is->sf_leafdata);
1630: /* Setup Layout and set local to global maps */
1631: PetscLayoutSetUp(A->rmap);
1632: PetscLayoutSetUp(A->cmap);
1633: ISLocalToGlobalMappingGetSize(rmapping,&nr);
1634: ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);
1635: ISLocalToGlobalMappingGetSize(cmapping,&nc);
1636: ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);
1637: /* check if the two mappings are actually the same for square matrices (DOLFIN passes 2 different objects) */
1638: if (rmapping != cmapping && A->rmap->N == A->cmap->N) {
1639: PetscBool same,gsame;
1641: same = PETSC_FALSE;
1642: if (nr == nc && cbs == rbs) {
1643: const PetscInt *idxs1,*idxs2;
1645: ISLocalToGlobalMappingGetBlockIndices(rmapping,&idxs1);
1646: ISLocalToGlobalMappingGetBlockIndices(cmapping,&idxs2);
1647: PetscMemcmp(idxs1,idxs2,(nr/rbs)*sizeof(PetscInt),&same);
1648: ISLocalToGlobalMappingRestoreBlockIndices(rmapping,&idxs1);
1649: ISLocalToGlobalMappingRestoreBlockIndices(cmapping,&idxs2);
1650: }
1651: MPIU_Allreduce(&same,&gsame,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
1652: if (gsame) cmapping = rmapping;
1653: }
1654: PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);
1655: PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);
1657: /* Create the local matrix A */
1658: MatCreate(PETSC_COMM_SELF,&is->A);
1659: MatSetType(is->A,MATAIJ);
1660: MatSetSizes(is->A,nr,nc,nr,nc);
1661: MatSetBlockSizes(is->A,rbs,cbs);
1662: MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);
1663: MatAppendOptionsPrefix(is->A,"is_");
1664: MatSetFromOptions(is->A);
1665: PetscLayoutSetUp(is->A->rmap);
1666: PetscLayoutSetUp(is->A->cmap);
1668: if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
1669: /* Create the local work vectors */
1670: MatCreateVecs(is->A,&is->x,&is->y);
1672: /* set up the global-to-local scatters */
1673: MatCreateVecs(A,&cglobal,&rglobal);
1674: ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);
1675: ISLocalToGlobalMappingApplyIS(rmapping,to,&from);
1676: VecScatterCreate(rglobal,from,is->y,to,&is->rctx);
1677: if (rmapping != cmapping) {
1678: ISDestroy(&to);
1679: ISDestroy(&from);
1680: ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);
1681: ISLocalToGlobalMappingApplyIS(cmapping,to,&from);
1682: VecScatterCreate(cglobal,from,is->x,to,&is->cctx);
1683: } else {
1684: PetscObjectReference((PetscObject)is->rctx);
1685: is->cctx = is->rctx;
1686: }
1688: /* interface counter vector (local): counts how many subdomains share each local dof */
1689: VecDuplicate(is->y,&is->counter);
1690: VecSet(is->y,1.);
1691: VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);
1692: VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);
1693: VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);
1694: VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);
1696: /* free workspace */
1697: VecDestroy(&rglobal);
1698: VecDestroy(&cglobal);
1699: ISDestroy(&to);
1700: ISDestroy(&from);
1701: }
1702: MatSetUp(A);
1703: return(0);
1704: }
1706: static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1707: {
1708: Mat_IS *is = (Mat_IS*)mat->data;
1710: #if defined(PETSC_USE_DEBUG)
1711: PetscInt i,zm,zn;
1712: #endif
1713: PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1716: #if defined(PETSC_USE_DEBUG)
1717: if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1718: /* count negative indices */
1719: for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1720: for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1721: #endif
1722: ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);
1723: ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);
1724: #if defined(PETSC_USE_DEBUG)
1725: /* count negative indices (should be the same as before) */
1726: for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1727: for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1728: if (!is->A->rmap->mapping && zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices cannot be mapped! Maybe you should not use MATIS");
1729: if (!is->A->cmap->mapping && zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices cannot be mapped! Maybe you should not use MATIS");
1730: #endif
1731: MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
1732: return(0);
1733: }
1735: static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1736: {
1737: Mat_IS *is = (Mat_IS*)mat->data;
1739: #if defined(PETSC_USE_DEBUG)
1740: PetscInt i,zm,zn;
1741: #endif
1742: PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1745: #if defined(PETSC_USE_DEBUG)
1746: if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1747: /* count negative indices */
1748: for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1749: for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1750: #endif
1751: ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);
1752: ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);
1753: #if defined(PETSC_USE_DEBUG)
1754: /* count negative indices (should be the same as before) */
1755: for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1756: for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1757: if (!is->A->rmap->mapping && zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices cannot be mapped! Maybe you should not use MATIS");
1758: if (!is->A->cmap->mapping && zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices cannot be mapped! Maybe you should not use MATIS");
1759: #endif
1760: MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);
1761: return(0);
1762: }
1764: static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1765: {
1767: Mat_IS *is = (Mat_IS*)A->data;
1770: if (is->A->rmap->mapping) {
1771: MatSetValuesLocal(is->A,m,rows,n,cols,values,addv);
1772: } else {
1773: MatSetValues(is->A,m,rows,n,cols,values,addv);
1774: }
1775: return(0);
1776: }
1778: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1779: {
1781: Mat_IS *is = (Mat_IS*)A->data;
1784: if (is->A->rmap->mapping) {
1785: #if defined(PETSC_USE_DEBUG)
1786: PetscInt ibs,bs;
1788: ISLocalToGlobalMappingGetBlockSize(is->A->rmap->mapping,&ibs);
1789: MatGetBlockSize(is->A,&bs);
1790: if (ibs != bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Different block sizes! local mat %D, local l2g map %D",bs,ibs);
1791: #endif
1792: MatSetValuesBlockedLocal(is->A,m,rows,n,cols,values,addv);
1793: } else {
1794: MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);
1795: }
1796: return(0);
1797: }
1799: static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
1800: {
1801: Mat_IS *is = (Mat_IS*)A->data;
1805: if (!n) {
1806: is->pure_neumann = PETSC_TRUE;
1807: } else {
1808: PetscInt i;
1809: is->pure_neumann = PETSC_FALSE;
1811: if (columns) {
1812: MatZeroRowsColumns(is->A,n,rows,diag,0,0);
1813: } else {
1814: MatZeroRows(is->A,n,rows,diag,0,0);
1815: }
1816: if (diag != 0.) {
1817: const PetscScalar *array;
1818: VecGetArrayRead(is->counter,&array);
1819: for (i=0; i<n; i++) {
1820: MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);
1821: }
1822: VecRestoreArrayRead(is->counter,&array);
1823: }
1824: MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
1825: MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);
1826: }
1827: return(0);
1828: }
1830: static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
1831: {
1832: Mat_IS *matis = (Mat_IS*)A->data;
1833: PetscInt nr,nl,len,i;
1834: PetscInt *lrows;
1838: #if defined(PETSC_USE_DEBUG)
1839: if (columns || diag != 0. || (x && b)) {
1840: PetscBool cong;
1842: PetscLayoutCompare(A->rmap,A->cmap,&cong);
1843: cong = (PetscBool)(cong && matis->sf == matis->csf);
1844: if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
1845: if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
1846: if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent, and the l2g maps be the same");
1847: }
1848: #endif
1849: /* get locally owned rows */
1850: PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);
1851: /* fix right hand side if needed */
1852: if (x && b) {
1853: const PetscScalar *xx;
1854: PetscScalar *bb;
1856: VecGetArrayRead(x, &xx);
1857: VecGetArray(b, &bb);
1858: for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
1859: VecRestoreArrayRead(x, &xx);
1860: VecRestoreArray(b, &bb);
1861: }
1862: /* get the rows associated with the local matrices */
1863: MatISSetUpSF(A);
1864: MatGetSize(matis->A,&nl,NULL);
1865: PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));
1866: PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));
1867: for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
1868: PetscFree(lrows);
1869: PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
1870: PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
1871: PetscMalloc1(nl,&lrows);
1872: for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
1873: MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);
1874: PetscFree(lrows);
1875: return(0);
1876: }
1878: static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1879: {
1883: MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);
1884: return(0);
1885: }
1887: static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1888: {
1892: MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);
1893: return(0);
1894: }
1896: static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1897: {
1898: Mat_IS *is = (Mat_IS*)A->data;
1902: MatAssemblyBegin(is->A,type);
1903: return(0);
1904: }
1906: static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1907: {
1908: Mat_IS *is = (Mat_IS*)A->data;
1912: MatAssemblyEnd(is->A,type);
1913: /* fix for local empty rows/cols */
1914: if (is->locempty && type == MAT_FINAL_ASSEMBLY) {
1915: Mat newlA;
1916: ISLocalToGlobalMapping l2g;
1917: IS tis;
1918: const PetscScalar *v;
1919: PetscInt i,n,cf,*loce,*locf;
1920: PetscBool sym;
1922: MatGetRowMaxAbs(is->A,is->y,NULL);
1923: MatIsSymmetric(is->A,PETSC_SMALL,&sym);
1924: if (!sym) SETERRQ(PetscObjectComm((PetscObject)is->A),PETSC_ERR_SUP,"Not yet implemented for unsymmetric case");
1925: VecGetLocalSize(is->y,&n);
1926: PetscMalloc1(n,&loce);
1927: PetscMalloc1(n,&locf);
1928: VecGetArrayRead(is->y,&v);
1929: for (i=0,cf=0;i<n;i++) {
1930: if (v[i] == 0.0) {
1931: loce[i] = -1;
1932: } else {
1933: loce[i] = cf;
1934: locf[cf++] = i;
1935: }
1936: }
1937: VecRestoreArrayRead(is->y,&v);
1938: /* extract valid submatrix */
1939: ISCreateGeneral(PetscObjectComm((PetscObject)is->A),cf,locf,PETSC_USE_POINTER,&tis);
1940: MatCreateSubMatrix(is->A,tis,tis,MAT_INITIAL_MATRIX,&newlA);
1941: ISDestroy(&tis);
1942: /* attach local l2g map for subsequent calls to MatSetValues() */
1943: ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is->A),1,n,loce,PETSC_OWN_POINTER,&l2g);
1944: MatSetLocalToGlobalMapping(newlA,l2g,l2g);
1945: ISLocalToGlobalMappingDestroy(&l2g);
1946: /* attach new global l2g map */
1947: ISLocalToGlobalMappingApply(A->rmap->mapping,cf,locf,locf);
1948: ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A),1,cf,locf,PETSC_OWN_POINTER,&l2g);
1949: MatSetLocalToGlobalMapping(A,l2g,l2g);
1950: MatISSetLocalMat(A,newlA);
1951: MatDestroy(&newlA);
1952: ISLocalToGlobalMappingDestroy(&l2g);
1953: }
1954: is->locempty = PETSC_FALSE;
1955: return(0);
1956: }
1958: static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1959: {
1960: Mat_IS *is = (Mat_IS*)mat->data;
1963: *local = is->A;
1964: return(0);
1965: }
1967: static PetscErrorCode MatISRestoreLocalMat_IS(Mat mat,Mat *local)
1968: {
1970: *local = NULL;
1971: return(0);
1972: }
1974: /*@
1975: MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
1977: Input Parameter:
1978: . mat - the matrix
1980: Output Parameter:
1981: . local - the local matrix
1983: Level: advanced
1985: Notes:
1986: This can be called if you have precomputed the nonzero structure of the
1987: matrix and want to provide it to the inner matrix object to improve the performance
1988: of the MatSetValues() operation.
1990: Call MatISRestoreLocalMat() when finished with the local matrix.
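
   A minimal usage sketch (the outer MATIS matrix A and the per-row count array nnz are illustrative,
   and the default AIJ local matrix type is assumed):
.vb
   Mat lA;
   MatISGetLocalMat(A,&lA);
   MatSeqAIJSetPreallocation(lA,0,nnz);  /* preallocate the inner sequential matrix */
   MatISRestoreLocalMat(A,&lA);
.ve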
1992: .seealso: MATIS
1993: @*/
1994: PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
1995: {
2001: PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));
2002: return(0);
2003: }
2005: /*@
2006: MatISRestoreLocalMat - Restores the local matrix obtained with MatISGetLocalMat()
2008: Input Parameter:
2009: . mat - the matrix
2011: Output Parameter:
2012: . local - the local matrix
2014: Level: advanced
2016: .seealso: MATIS
2017: @*/
2018: PetscErrorCode MatISRestoreLocalMat(Mat mat,Mat *local)
2019: {
2025: PetscUseMethod(mat,"MatISRestoreLocalMat_C",(Mat,Mat*),(mat,local));
2026: return(0);
2027: }
2029: static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
2030: {
2031: Mat_IS *is = (Mat_IS*)mat->data;
2032: PetscInt nrows,ncols,orows,ocols;
2036: if (is->A) {
2037: MatGetSize(is->A,&orows,&ocols);
2038: MatGetSize(local,&nrows,&ncols);
2039: if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
2040: }
2041: PetscObjectReference((PetscObject)local);
2042: MatDestroy(&is->A);
2043: is->A = local;
2044: return(0);
2045: }
2047: /*@
2048: MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
2050: Input Parameters:
2051: + mat - the matrix
2052: - local - the local matrix
2056: Level: advanced
2058: Notes:
2059: This can be called if you have precomputed the local matrix and
2060: want to provide it to the matrix object MATIS.
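
   A minimal sketch, assuming a square local problem whose size nl matches the local-to-global map of A,
   and an illustrative nonzero estimate maxnz:
.vb
   Mat lA;
   MatCreateSeqAIJ(PETSC_COMM_SELF,nl,nl,maxnz,NULL,&lA);
   /* ... insert the local (Neumann) contribution into lA ... */
   MatAssemblyBegin(lA,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(lA,MAT_FINAL_ASSEMBLY);
   MatISSetLocalMat(A,lA);
   MatDestroy(&lA); /* the MATIS object keeps its own reference */
.ve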
2062: .seealso: MATIS
2063: @*/
2064: PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
2065: {
2071: PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));
2072: return(0);
2073: }
2075: static PetscErrorCode MatZeroEntries_IS(Mat A)
2076: {
2077: Mat_IS *a = (Mat_IS*)A->data;
2081: MatZeroEntries(a->A);
2082: return(0);
2083: }
2085: static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
2086: {
2087: Mat_IS *is = (Mat_IS*)A->data;
2091: MatScale(is->A,a);
2092: return(0);
2093: }
2095: static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
2096: {
2097: Mat_IS *is = (Mat_IS*)A->data;
2101: /* get diagonal of the local matrix */
2102: MatGetDiagonal(is->A,is->y);
2104: /* scatter diagonal back into global vector */
2105: VecSet(v,0);
2106: VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);
2107: VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);
2108: return(0);
2109: }
2111: static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
2112: {
2113: Mat_IS *a = (Mat_IS*)A->data;
2117: MatSetOption(a->A,op,flg);
2118: return(0);
2119: }
2121: static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
2122: {
2123: Mat_IS *y = (Mat_IS*)Y->data;
2124: Mat_IS *x;
2125: #if defined(PETSC_USE_DEBUG)
2126: PetscBool ismatis;
2127: #endif
2131: #if defined(PETSC_USE_DEBUG)
2132: PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);
2133: if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
2134: #endif
2135: x = (Mat_IS*)X->data;
2136: MatAXPY(y->A,a,x->A,str);
2137: return(0);
2138: }
2140: static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
2141: {
2142: Mat lA;
2143: Mat_IS *matis;
2144: ISLocalToGlobalMapping rl2g,cl2g;
2145: IS is;
2146: const PetscInt *rg,*rl;
2147: PetscInt nrg;
2148: PetscInt N,M,nrl,i,*idxs;
2149: PetscErrorCode ierr;
2152: ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);
2153: ISGetLocalSize(row,&nrl);
2154: ISGetIndices(row,&rl);
2155: ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);
2156: #if defined(PETSC_USE_DEBUG)
2157: for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
2158: #endif
2159: PetscMalloc1(nrg,&idxs);
2160: /* map from [0,nrl) to row */
2161: for (i=0;i<nrl;i++) idxs[i] = rl[i];
2162: #if defined(PETSC_USE_DEBUG)
2163: for (i=nrl;i<nrg;i++) idxs[i] = nrg;
2164: #else
2165: for (i=nrl;i<nrg;i++) idxs[i] = -1;
2166: #endif
2167: ISRestoreIndices(row,&rl);
2168: ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);
2169: ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);
2170: ISLocalToGlobalMappingCreateIS(is,&rl2g);
2171: ISDestroy(&is);
2172: /* compute new l2g map for columns */
2173: if (col != row || A->rmap->mapping != A->cmap->mapping) {
2174: const PetscInt *cg,*cl;
2175: PetscInt ncg;
2176: PetscInt ncl;
2178: ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);
2179: ISGetLocalSize(col,&ncl);
2180: ISGetIndices(col,&cl);
2181: ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);
2182: #if defined(PETSC_USE_DEBUG)
2183: for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
2184: #endif
2185: PetscMalloc1(ncg,&idxs);
2186: /* map from [0,ncl) to col */
2187: for (i=0;i<ncl;i++) idxs[i] = cl[i];
2188: #if defined(PETSC_USE_DEBUG)
2189: for (i=ncl;i<ncg;i++) idxs[i] = ncg;
2190: #else
2191: for (i=ncl;i<ncg;i++) idxs[i] = -1;
2192: #endif
2193: ISRestoreIndices(col,&cl);
2194: ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);
2195: ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);
2196: ISLocalToGlobalMappingCreateIS(is,&cl2g);
2197: ISDestroy(&is);
2198: } else {
2199: PetscObjectReference((PetscObject)rl2g);
2200: cl2g = rl2g;
2201: }
2202: /* create the MATIS submatrix */
2203: MatGetSize(A,&M,&N);
2204: MatCreate(PetscObjectComm((PetscObject)A),submat);
2205: MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);
2206: MatSetType(*submat,MATIS);
2207: matis = (Mat_IS*)((*submat)->data);
2208: matis->islocalref = PETSC_TRUE;
2209: MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);
2210: MatISGetLocalMat(A,&lA);
2211: MatISSetLocalMat(*submat,lA);
2212: MatSetUp(*submat);
2213: MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);
2214: MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);
2215: ISLocalToGlobalMappingDestroy(&rl2g);
2216: ISLocalToGlobalMappingDestroy(&cl2g);
2217: /* remove unsupported ops */
2218: PetscMemzero((*submat)->ops,sizeof(struct _MatOps));
2219: (*submat)->ops->destroy = MatDestroy_IS;
2220: (*submat)->ops->setvalueslocal = MatSetValuesLocal_SubMat_IS;
2221: (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
2222: (*submat)->ops->assemblybegin = MatAssemblyBegin_IS;
2223: (*submat)->ops->assemblyend = MatAssemblyEnd_IS;
2224: return(0);
2225: }
2227: static PetscErrorCode MatSetFromOptions_IS(PetscOptionItems *PetscOptionsObject, Mat A)
2228: {
2229: Mat_IS *a = (Mat_IS*)A->data;
2233: PetscOptionsHead(PetscOptionsObject,"MATIS options");
2234: PetscObjectOptionsBegin((PetscObject)A);
2235: PetscOptionsBool("-matis_fixempty","Fix local matrices in case of empty local rows/columns",NULL,a->locempty,&a->locempty,NULL);
2236: PetscOptionsEnd();
2237: return(0);
2238: }
2240: /*@
2241: MatCreateIS - Creates a "process" unassembled matrix, assembled on each
2242: process but not across processes.
2244: Input Parameters:
2245: + comm - MPI communicator that will share the matrix
2246: . bs - block size of the matrix
2247: . m,n,M,N - local and/or global sizes of the left and right vectors used in matrix-vector products
2248: . rmap - local to global map for rows
2249: - cmap - local to global map for cols
2251: Output Parameter:
2252: . A - the resulting matrix
2254: Level: advanced
2256: Notes: See MATIS for more details.
2257: m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
2258: used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
2259: If either rmap or cmap is NULL, the matrix is assumed to be square and the non-NULL map is used for both rows and columns.
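
   A minimal sketch of creating a square MATIS matrix of global size N from an array of n_local global
   indices (N, n_local and global_idx are illustrative):
.vb
   ISLocalToGlobalMapping map;
   Mat                    A;
   ISLocalToGlobalMappingCreate(comm,1,n_local,global_idx,PETSC_COPY_VALUES,&map);
   MatCreateIS(comm,1,PETSC_DECIDE,PETSC_DECIDE,N,N,map,NULL,&A);
   ISLocalToGlobalMappingDestroy(&map);
.ve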
2261: .seealso: MATIS, MatSetLocalToGlobalMapping()
2262: @*/
2263: PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
2264: {
2268: if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings");
2269: MatCreate(comm,A);
2270: MatSetSizes(*A,m,n,M,N);
2271: if (bs > 0) {
2272: MatSetBlockSize(*A,bs);
2273: }
2274: MatSetType(*A,MATIS);
2275: if (rmap && cmap) {
2276: MatSetLocalToGlobalMapping(*A,rmap,cmap);
2277: } else if (!rmap) {
2278: MatSetLocalToGlobalMapping(*A,cmap,cmap);
2279: } else {
2280: MatSetLocalToGlobalMapping(*A,rmap,rmap);
2281: }
2282: return(0);
2283: }
2285: /*MC
2286: MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
2287: This stores the matrices in globally unassembled form. Each processor
2288: assembles only its local Neumann problem and the parallel matrix vector
2289: product is handled "implicitly".
2291: Operations Provided:
2292: + MatMult()
2293: . MatMultAdd()
2294: . MatMultTranspose()
2295: . MatMultTransposeAdd()
2296: . MatZeroEntries()
2297: . MatSetOption()
2298: . MatZeroRows()
2299: . MatSetValues()
2300: . MatSetValuesBlocked()
2301: . MatSetValuesLocal()
2302: . MatSetValuesBlockedLocal()
2303: . MatScale()
2304: . MatGetDiagonal()
2305: . MatMissingDiagonal()
2306: . MatDuplicate()
2307: . MatCopy()
2308: . MatAXPY()
2309: . MatCreateSubMatrix()
2310: . MatGetLocalSubMatrix()
2311: . MatTranspose()
2312: - MatSetLocalToGlobalMapping()
2314: Options Database Keys:
2315: . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
2317: Notes: The options for the inner matrix use the prefix -is_ (e.g. -is_mat_type)
2319: You must call MatSetLocalToGlobalMapping() before using this matrix type.
2321: You can preallocate the local matrix after obtaining it with
2322: MatISGetLocalMat(); alternatively, use MatISSetPreallocation(), as in the sketch below
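
   A typical assembly sketch (map, dnnz, onnz, the local/global sizes and the insertion loop are illustrative):
.vb
   MatCreate(comm,&A);
   MatSetSizes(A,n,n,N,N);
   MatSetType(A,MATIS);
   MatSetLocalToGlobalMapping(A,map,map);
   MatISSetPreallocation(A,0,dnnz,0,onnz);
   /* insert each subdomain contribution using local indices */
   MatSetValuesLocal(A,nidx,idx,nidx,idx,vals,ADD_VALUES);
   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve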
2324: Level: advanced
2326: .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
2328: M*/
2330: PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
2331: {
2333: Mat_IS *b;
2336: PetscNewLog(A,&b);
2337: A->data = (void*)b;
2339: /* matrix ops */
2340: PetscMemzero(A->ops,sizeof(struct _MatOps));
2341: A->ops->mult = MatMult_IS;
2342: A->ops->multadd = MatMultAdd_IS;
2343: A->ops->multtranspose = MatMultTranspose_IS;
2344: A->ops->multtransposeadd = MatMultTransposeAdd_IS;
2345: A->ops->destroy = MatDestroy_IS;
2346: A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
2347: A->ops->setvalues = MatSetValues_IS;
2348: A->ops->setvaluesblocked = MatSetValuesBlocked_IS;
2349: A->ops->setvalueslocal = MatSetValuesLocal_IS;
2350: A->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_IS;
2351: A->ops->zerorows = MatZeroRows_IS;
2352: A->ops->zerorowscolumns = MatZeroRowsColumns_IS;
2353: A->ops->assemblybegin = MatAssemblyBegin_IS;
2354: A->ops->assemblyend = MatAssemblyEnd_IS;
2355: A->ops->view = MatView_IS;
2356: A->ops->zeroentries = MatZeroEntries_IS;
2357: A->ops->scale = MatScale_IS;
2358: A->ops->getdiagonal = MatGetDiagonal_IS;
2359: A->ops->setoption = MatSetOption_IS;
2360: A->ops->ishermitian = MatIsHermitian_IS;
2361: A->ops->issymmetric = MatIsSymmetric_IS;
2362: A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
2363: A->ops->duplicate = MatDuplicate_IS;
2364: A->ops->missingdiagonal = MatMissingDiagonal_IS;
2365: A->ops->copy = MatCopy_IS;
2366: A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_IS;
2367: A->ops->createsubmatrix = MatCreateSubMatrix_IS;
2368: A->ops->axpy = MatAXPY_IS;
2369: A->ops->diagonalset = MatDiagonalSet_IS;
2370: A->ops->shift = MatShift_IS;
2371: A->ops->transpose = MatTranspose_IS;
2372: A->ops->getinfo = MatGetInfo_IS;
2373: A->ops->diagonalscale = MatDiagonalScale_IS;
2374: A->ops->setfromoptions = MatSetFromOptions_IS;
2376: /* special MATIS functions */
2377: PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);
2378: PetscObjectComposeFunction((PetscObject)A,"MatISRestoreLocalMat_C",MatISRestoreLocalMat_IS);
2379: PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);
2380: PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);
2381: PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);
2382: PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);
2383: PetscObjectChangeTypeName((PetscObject)A,MATIS);
2384: return(0);
2385: }