Actual source code: matnest.c
petsc-3.8.4 2018-03-24
2: #include <../src/mat/impls/nest/matnestimpl.h>
3: #include <../src/mat/impls/aij/seq/aij.h>
4: #include <petscsf.h>
6: static PetscErrorCode MatSetUp_NestIS_Private(Mat,PetscInt,const IS[],PetscInt,const IS[]);
7: static PetscErrorCode MatCreateVecs_Nest(Mat A,Vec *right,Vec *left);
8: PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat,MatType,MatReuse,Mat*);
10: /* private functions */
11: static PetscErrorCode MatNestGetSizes_Private(Mat A,PetscInt *m,PetscInt *n,PetscInt *M,PetscInt *N)
12: {
13: Mat_Nest *bA = (Mat_Nest*)A->data;
14: PetscInt i,j;
18: *m = *n = *M = *N = 0;
19: for (i=0; i<bA->nr; i++) { /* rows */
20: PetscInt sm,sM;
21: ISGetLocalSize(bA->isglobal.row[i],&sm);
22: ISGetSize(bA->isglobal.row[i],&sM);
23: *m += sm;
24: *M += sM;
25: }
26: for (j=0; j<bA->nc; j++) { /* cols */
27: PetscInt sn,sN;
28: ISGetLocalSize(bA->isglobal.col[j],&sn);
29: ISGetSize(bA->isglobal.col[j],&sN);
30: *n += sn;
31: *N += sN;
32: }
33: return(0);
34: }
36: /* operations */
37: static PetscErrorCode MatMult_Nest(Mat A,Vec x,Vec y)
38: {
39: Mat_Nest *bA = (Mat_Nest*)A->data;
40: Vec *bx = bA->right,*by = bA->left;
41: PetscInt i,j,nr = bA->nr,nc = bA->nc;
45: for (i=0; i<nr; i++) {VecGetSubVector(y,bA->isglobal.row[i],&by[i]);}
46: for (i=0; i<nc; i++) {VecGetSubVector(x,bA->isglobal.col[i],&bx[i]);}
47: for (i=0; i<nr; i++) {
48: VecZeroEntries(by[i]);
49: for (j=0; j<nc; j++) {
50: if (!bA->m[i][j]) continue;
51: /* y[i] <- y[i] + A[i][j] * x[j] */
52: MatMultAdd(bA->m[i][j],bx[j],by[i],by[i]);
53: }
54: }
55: for (i=0; i<nr; i++) {VecRestoreSubVector(y,bA->isglobal.row[i],&by[i]);}
56: for (i=0; i<nc; i++) {VecRestoreSubVector(x,bA->isglobal.col[i],&bx[i]);}
57: return(0);
58: }
60: static PetscErrorCode MatMultAdd_Nest(Mat A,Vec x,Vec y,Vec z)
61: {
62: Mat_Nest *bA = (Mat_Nest*)A->data;
63: Vec *bx = bA->right,*bz = bA->left;
64: PetscInt i,j,nr = bA->nr,nc = bA->nc;
68: for (i=0; i<nr; i++) {VecGetSubVector(z,bA->isglobal.row[i],&bz[i]);}
69: for (i=0; i<nc; i++) {VecGetSubVector(x,bA->isglobal.col[i],&bx[i]);}
70: for (i=0; i<nr; i++) {
71: if (y != z) {
72: Vec by;
73: VecGetSubVector(y,bA->isglobal.row[i],&by);
74: VecCopy(by,bz[i]);
75: VecRestoreSubVector(y,bA->isglobal.row[i],&by);
76: }
77: for (j=0; j<nc; j++) {
78: if (!bA->m[i][j]) continue;
79: /* z[i] <- z[i] + A[i][j] * x[j] */
80: MatMultAdd(bA->m[i][j],bx[j],bz[i],bz[i]);
81: }
82: }
83: for (i=0; i<nr; i++) {VecRestoreSubVector(z,bA->isglobal.row[i],&bz[i]);}
84: for (i=0; i<nc; i++) {VecRestoreSubVector(x,bA->isglobal.col[i],&bx[i]);}
85: return(0);
86: }
88: static PetscErrorCode MatMultTranspose_Nest(Mat A,Vec x,Vec y)
89: {
90: Mat_Nest *bA = (Mat_Nest*)A->data;
91: Vec *bx = bA->left,*by = bA->right;
92: PetscInt i,j,nr = bA->nr,nc = bA->nc;
96: for (i=0; i<nr; i++) {VecGetSubVector(x,bA->isglobal.row[i],&bx[i]);}
97: for (i=0; i<nc; i++) {VecGetSubVector(y,bA->isglobal.col[i],&by[i]);}
98: for (j=0; j<nc; j++) {
99: VecZeroEntries(by[j]);
100: for (i=0; i<nr; i++) {
101: if (!bA->m[i][j]) continue;
102: /* y[j] <- y[j] + (A[i][j])^T * x[i] */
103: MatMultTransposeAdd(bA->m[i][j],bx[i],by[j],by[j]);
104: }
105: }
106: for (i=0; i<nr; i++) {VecRestoreSubVector(x,bA->isglobal.row[i],&bx[i]);}
107: for (i=0; i<nc; i++) {VecRestoreSubVector(y,bA->isglobal.col[i],&by[i]);}
108: return(0);
109: }
111: static PetscErrorCode MatMultTransposeAdd_Nest(Mat A,Vec x,Vec y,Vec z)
112: {
113: Mat_Nest *bA = (Mat_Nest*)A->data;
114: Vec *bx = bA->left,*bz = bA->right;
115: PetscInt i,j,nr = bA->nr,nc = bA->nc;
119: for (i=0; i<nr; i++) {VecGetSubVector(x,bA->isglobal.row[i],&bx[i]);}
120: for (i=0; i<nc; i++) {VecGetSubVector(z,bA->isglobal.col[i],&bz[i]);}
121: for (j=0; j<nc; j++) {
122: if (y != z) {
123: Vec by;
124: VecGetSubVector(y,bA->isglobal.col[j],&by);
125: VecCopy(by,bz[j]);
126: VecRestoreSubVector(y,bA->isglobal.col[j],&by);
127: }
128: for (i=0; i<nr; i++) {
129: if (!bA->m[i][j]) continue;
130: /* z[j] <- z[j] + (A[i][j])^T * x[i] */
131: MatMultTransposeAdd(bA->m[i][j],bx[i],bz[j],bz[j]);
132: }
133: }
134: for (i=0; i<nr; i++) {VecRestoreSubVector(x,bA->isglobal.row[i],&bx[i]);}
135: for (i=0; i<nc; i++) {VecRestoreSubVector(z,bA->isglobal.col[i],&bz[i]);}
136: return(0);
137: }
139: static PetscErrorCode MatTranspose_Nest(Mat A,MatReuse reuse,Mat *B)
140: {
141: Mat_Nest *bA = (Mat_Nest*)A->data, *bC;
142: Mat C;
143: PetscInt i,j,nr = bA->nr,nc = bA->nc;
147: if (reuse == MAT_INPLACE_MATRIX && nr != nc) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square nested matrix only for in-place");
149: if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
150: Mat *subs;
151: IS *is_row,*is_col;
153: PetscCalloc1(nr * nc,&subs);
154: PetscMalloc2(nr,&is_row,nc,&is_col);
155: MatNestGetISs(A,is_row,is_col);
156: if (reuse == MAT_INPLACE_MATRIX) {
157: for (i=0; i<nr; i++) {
158: for (j=0; j<nc; j++) {
159: subs[i + nr * j] = bA->m[i][j];
160: }
161: }
162: }
164: MatCreateNest(PetscObjectComm((PetscObject)A),nc,is_col,nr,is_row,subs,&C);
165: PetscFree(subs);
166: PetscFree2(is_row,is_col);
167: } else {
168: C = *B;
169: }
171: bC = (Mat_Nest*)C->data;
172: for (i=0; i<nr; i++) {
173: for (j=0; j<nc; j++) {
174: if (bA->m[i][j]) {
175: MatTranspose(bA->m[i][j], reuse, &(bC->m[j][i]));
176: } else {
177: bC->m[j][i] = NULL;
178: }
179: }
180: }
182: if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
183: *B = C;
184: } else {
185: MatHeaderMerge(A, &C);
186: }
187: return(0);
188: }
190: static PetscErrorCode MatNestDestroyISList(PetscInt n,IS **list)
191: {
193: IS *lst = *list;
194: PetscInt i;
197: if (!lst) return(0);
198: for (i=0; i<n; i++) if (lst[i]) {ISDestroy(&lst[i]);}
199: PetscFree(lst);
200: *list = NULL;
201: return(0);
202: }
204: static PetscErrorCode MatDestroy_Nest(Mat A)
205: {
206: Mat_Nest *vs = (Mat_Nest*)A->data;
207: PetscInt i,j;
211: /* release the index sets, the cached block lengths, and the work vectors */
212: MatNestDestroyISList(vs->nr,&vs->isglobal.row);
213: MatNestDestroyISList(vs->nc,&vs->isglobal.col);
214: MatNestDestroyISList(vs->nr,&vs->islocal.row);
215: MatNestDestroyISList(vs->nc,&vs->islocal.col);
217: PetscFree(vs->row_len);
218: PetscFree(vs->col_len);
220: PetscFree2(vs->left,vs->right);
222: /* release the matrices and the place holders */
223: if (vs->m) {
224: for (i=0; i<vs->nr; i++) {
225: for (j=0; j<vs->nc; j++) {
226: MatDestroy(&vs->m[i][j]);
227: }
228: PetscFree(vs->m[i]);
229: }
230: PetscFree(vs->m);
231: }
232: PetscFree(A->data);
234: PetscObjectComposeFunction((PetscObject)A,"MatNestGetSubMat_C",0);
235: PetscObjectComposeFunction((PetscObject)A,"MatNestSetSubMat_C",0);
236: PetscObjectComposeFunction((PetscObject)A,"MatNestGetSubMats_C",0);
237: PetscObjectComposeFunction((PetscObject)A,"MatNestGetSize_C",0);
238: PetscObjectComposeFunction((PetscObject)A,"MatNestGetISs_C",0);
239: PetscObjectComposeFunction((PetscObject)A,"MatNestGetLocalISs_C",0);
240: PetscObjectComposeFunction((PetscObject)A,"MatNestSetVecType_C",0);
241: PetscObjectComposeFunction((PetscObject)A,"MatNestSetSubMats_C",0);
242: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_mpiaij_C",0);
243: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_seqaij_C",0);
244: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_aij_C",0);
245: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_is_C",0);
246: return(0);
247: }
249: static PetscErrorCode MatAssemblyBegin_Nest(Mat A,MatAssemblyType type)
250: {
251: Mat_Nest *vs = (Mat_Nest*)A->data;
252: PetscInt i,j;
256: for (i=0; i<vs->nr; i++) {
257: for (j=0; j<vs->nc; j++) {
258: if (vs->m[i][j]) {
259: MatAssemblyBegin(vs->m[i][j],type);
260: if (!vs->splitassembly) {
261: /* Note: split assembly will fail if the same block appears more than once (even indirectly through a nested
262: * sub-block). This could be fixed by adding a flag to Mat so that there was a way to check if a Mat was
263: * already performing an assembly, but the result would be more complicated and appears to offer less
264: * potential for diagnostics and correctness checking. Split assembly should be fixed once there is an
265: * interface for libraries to make asynchronous progress in "user-defined non-blocking collectives".
266: */
267: MatAssemblyEnd(vs->m[i][j],type);
268: }
269: }
270: }
271: }
272: return(0);
273: }
275: static PetscErrorCode MatAssemblyEnd_Nest(Mat A, MatAssemblyType type)
276: {
277: Mat_Nest *vs = (Mat_Nest*)A->data;
278: PetscInt i,j;
282: for (i=0; i<vs->nr; i++) {
283: for (j=0; j<vs->nc; j++) {
284: if (vs->m[i][j]) {
285: if (vs->splitassembly) {
286: MatAssemblyEnd(vs->m[i][j],type);
287: }
288: }
289: }
290: }
291: return(0);
292: }
294: static PetscErrorCode MatNestFindNonzeroSubMatRow(Mat A,PetscInt row,Mat *B)
295: {
297: Mat_Nest *vs = (Mat_Nest*)A->data;
298: PetscInt j;
299: Mat sub;
302: sub = (row < vs->nc) ? vs->m[row][row] : (Mat)NULL; /* Prefer to find on the diagonal */
303: for (j=0; !sub && j<vs->nc; j++) sub = vs->m[row][j];
304: if (sub) {MatSetUp(sub);} /* Ensure that the sizes are available */
305: *B = sub;
306: return(0);
307: }
309: static PetscErrorCode MatNestFindNonzeroSubMatCol(Mat A,PetscInt col,Mat *B)
310: {
312: Mat_Nest *vs = (Mat_Nest*)A->data;
313: PetscInt i;
314: Mat sub;
317: sub = (col < vs->nr) ? vs->m[col][col] : (Mat)NULL; /* Prefer to find on the diagonal */
318: for (i=0; !sub && i<vs->nr; i++) sub = vs->m[i][col];
319: if (sub) {MatSetUp(sub);} /* Ensure that the sizes are available */
320: *B = sub;
321: return(0);
322: }
324: static PetscErrorCode MatNestFindIS(Mat A,PetscInt n,const IS list[],IS is,PetscInt *found)
325: {
327: PetscInt i;
328: PetscBool flg;
334: *found = -1;
335: for (i=0; i<n; i++) {
336: if (!list[i]) continue;
337: ISEqual(list[i],is,&flg);
338: if (flg) {
339: *found = i;
340: return(0);
341: }
342: }
343: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Could not find index set");
344: return(0);
345: }
347: /* Get a block row as a new MatNest */
348: static PetscErrorCode MatNestGetRow(Mat A,PetscInt row,Mat *B)
349: {
350: Mat_Nest *vs = (Mat_Nest*)A->data;
351: char keyname[256];
355: *B = NULL;
356: PetscSNPrintf(keyname,sizeof(keyname),"NestRow_%D",row);
357: PetscObjectQuery((PetscObject)A,keyname,(PetscObject*)B);
358: if (*B) return(0);
360: MatCreateNest(PetscObjectComm((PetscObject)A),1,NULL,vs->nc,vs->isglobal.col,vs->m[row],B);
362: (*B)->assembled = A->assembled;
364: PetscObjectCompose((PetscObject)A,keyname,(PetscObject)*B);
365: PetscObjectDereference((PetscObject)*B); /* Leave the only remaining reference in the composition */
366: return(0);
367: }
369: static PetscErrorCode MatNestFindSubMat(Mat A,struct MatNestISPair *is,IS isrow,IS iscol,Mat *B)
370: {
371: Mat_Nest *vs = (Mat_Nest*)A->data;
373: PetscInt row,col;
374: PetscBool same,isFullCol,isFullColGlobal;
377: /* Check if full column space. This is a hack */
378: isFullCol = PETSC_FALSE;
379: PetscObjectTypeCompare((PetscObject)iscol,ISSTRIDE,&same);
380: if (same) {
381: PetscInt n,first,step,i,an,am,afirst,astep;
382: ISStrideGetInfo(iscol,&first,&step);
383: ISGetLocalSize(iscol,&n);
384: isFullCol = PETSC_TRUE;
385: for (i=0,an=A->cmap->rstart; i<vs->nc; i++) {
386: ISStrideGetInfo(is->col[i],&afirst,&astep);
387: ISGetLocalSize(is->col[i],&am);
388: if (afirst != an || astep != step) isFullCol = PETSC_FALSE;
389: an += am;
390: }
391: if (an != A->cmap->rstart+n) isFullCol = PETSC_FALSE;
392: }
393: MPIU_Allreduce(&isFullCol,&isFullColGlobal,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)iscol));
395: if (isFullColGlobal && vs->nc > 1) {
396: PetscInt row;
397: MatNestFindIS(A,vs->nr,is->row,isrow,&row);
398: MatNestGetRow(A,row,B);
399: } else {
400: MatNestFindIS(A,vs->nr,is->row,isrow,&row);
401: MatNestFindIS(A,vs->nc,is->col,iscol,&col);
402: if (!vs->m[row][col]) {
403: PetscInt lr,lc;
405: MatCreate(PetscObjectComm((PetscObject)A),&vs->m[row][col]);
406: ISGetLocalSize(vs->isglobal.row[row],&lr);
407: ISGetLocalSize(vs->isglobal.col[col],&lc);
408: MatSetSizes(vs->m[row][col],lr,lc,PETSC_DECIDE,PETSC_DECIDE);
409: MatSetUp(vs->m[row][col]);
410: MatAssemblyBegin(vs->m[row][col],MAT_FINAL_ASSEMBLY);
411: MatAssemblyEnd(vs->m[row][col],MAT_FINAL_ASSEMBLY);
412: }
413: *B = vs->m[row][col];
414: }
415: return(0);
416: }
418: static PetscErrorCode MatCreateSubMatrix_Nest(Mat A,IS isrow,IS iscol,MatReuse reuse,Mat *B)
419: {
421: Mat_Nest *vs = (Mat_Nest*)A->data;
422: Mat sub;
425: MatNestFindSubMat(A,&vs->isglobal,isrow,iscol,&sub);
426: switch (reuse) {
427: case MAT_INITIAL_MATRIX:
428: if (sub) { PetscObjectReference((PetscObject)sub); }
429: *B = sub;
430: break;
431: case MAT_REUSE_MATRIX:
432: if (sub != *B) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Submatrix was not used before in this call");
433: break;
434: case MAT_IGNORE_MATRIX: /* Nothing to do */
435: break;
436: case MAT_INPLACE_MATRIX: /* Nothing to do */
437: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX is not supported yet");
438: break;
439: }
440: return(0);
441: }
443: PetscErrorCode MatGetLocalSubMatrix_Nest(Mat A,IS isrow,IS iscol,Mat *B)
444: {
446: Mat_Nest *vs = (Mat_Nest*)A->data;
447: Mat sub;
450: MatNestFindSubMat(A,&vs->islocal,isrow,iscol,&sub);
451: /* We allow the submatrix to be NULL; it might be better to return an empty matrix to the user instead */
452: if (sub) {PetscObjectReference((PetscObject)sub);}
453: *B = sub;
454: return(0);
455: }
457: static PetscErrorCode MatRestoreLocalSubMatrix_Nest(Mat A,IS isrow,IS iscol,Mat *B)
458: {
460: Mat_Nest *vs = (Mat_Nest*)A->data;
461: Mat sub;
464: MatNestFindSubMat(A,&vs->islocal,isrow,iscol,&sub);
465: if (*B != sub) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Local submatrix has not been gotten");
466: if (sub) {
467: if (((PetscObject)sub)->refct <= 1) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Local submatrix has had reference count decremented too many times");
468: MatDestroy(B);
469: }
470: return(0);
471: }
473: static PetscErrorCode MatGetDiagonal_Nest(Mat A,Vec v)
474: {
475: Mat_Nest *bA = (Mat_Nest*)A->data;
476: PetscInt i;
480: for (i=0; i<bA->nr; i++) {
481: Vec bv;
482: VecGetSubVector(v,bA->isglobal.row[i],&bv);
483: if (bA->m[i][i]) {
484: MatGetDiagonal(bA->m[i][i],bv);
485: } else {
486: VecSet(bv,0.0);
487: }
488: VecRestoreSubVector(v,bA->isglobal.row[i],&bv);
489: }
490: return(0);
491: }
493: static PetscErrorCode MatDiagonalScale_Nest(Mat A,Vec l,Vec r)
494: {
495: Mat_Nest *bA = (Mat_Nest*)A->data;
496: Vec bl,*br;
497: PetscInt i,j;
501: PetscCalloc1(bA->nc,&br);
502: if (r) {
503: for (j=0; j<bA->nc; j++) {VecGetSubVector(r,bA->isglobal.col[j],&br[j]);}
504: }
505: bl = NULL;
506: for (i=0; i<bA->nr; i++) {
507: if (l) {
508: VecGetSubVector(l,bA->isglobal.row[i],&bl);
509: }
510: for (j=0; j<bA->nc; j++) {
511: if (bA->m[i][j]) {
512: MatDiagonalScale(bA->m[i][j],bl,br[j]);
513: }
514: }
515: if (l) {
516: VecRestoreSubVector(l,bA->isglobal.row[i],&bl);
517: }
518: }
519: if (r) {
520: for (j=0; j<bA->nc; j++) {VecRestoreSubVector(r,bA->isglobal.col[j],&br[j]);}
521: }
522: PetscFree(br);
523: return(0);
524: }
526: static PetscErrorCode MatScale_Nest(Mat A,PetscScalar a)
527: {
528: Mat_Nest *bA = (Mat_Nest*)A->data;
529: PetscInt i,j;
533: for (i=0; i<bA->nr; i++) {
534: for (j=0; j<bA->nc; j++) {
535: if (bA->m[i][j]) {
536: MatScale(bA->m[i][j],a);
537: }
538: }
539: }
540: return(0);
541: }
543: static PetscErrorCode MatShift_Nest(Mat A,PetscScalar a)
544: {
545: Mat_Nest *bA = (Mat_Nest*)A->data;
546: PetscInt i;
550: for (i=0; i<bA->nr; i++) {
551: if (!bA->m[i][i]) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for shifting an empty diagonal block, insert a matrix in block (%D,%D)",i,i);
552: MatShift(bA->m[i][i],a);
553: }
554: return(0);
555: }
557: static PetscErrorCode MatDiagonalSet_Nest(Mat A,Vec D,InsertMode is)
558: {
559: Mat_Nest *bA = (Mat_Nest*)A->data;
560: PetscInt i;
564: for (i=0; i<bA->nr; i++) {
565: Vec bv;
566: VecGetSubVector(D,bA->isglobal.row[i],&bv);
567: if (bA->m[i][i]) {
568: MatDiagonalSet(bA->m[i][i],bv,is);
569: }
570: VecRestoreSubVector(D,bA->isglobal.row[i],&bv);
571: }
572: return(0);
573: }
575: static PetscErrorCode MatSetRandom_Nest(Mat A,PetscRandom rctx)
576: {
577: Mat_Nest *bA = (Mat_Nest*)A->data;
578: PetscInt i,j;
582: for (i=0; i<bA->nr; i++) {
583: for (j=0; j<bA->nc; j++) {
584: if (bA->m[i][j]) {
585: MatSetRandom(bA->m[i][j],rctx);
586: }
587: }
588: }
589: return(0);
590: }
592: static PetscErrorCode MatCreateVecs_Nest(Mat A,Vec *right,Vec *left)
593: {
594: Mat_Nest *bA = (Mat_Nest*)A->data;
595: Vec *L,*R;
596: MPI_Comm comm;
597: PetscInt i,j;
601: PetscObjectGetComm((PetscObject)A,&comm);
602: if (right) {
603: /* allocate R */
604: PetscMalloc1(bA->nc, &R);
605: /* Create the right vectors */
606: for (j=0; j<bA->nc; j++) {
607: for (i=0; i<bA->nr; i++) {
608: if (bA->m[i][j]) {
609: MatCreateVecs(bA->m[i][j],&R[j],NULL);
610: break;
611: }
612: }
613: if (i==bA->nr) SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null column.");
614: }
615: VecCreateNest(comm,bA->nc,bA->isglobal.col,R,right);
616: /* hand back control to the nest vector */
617: for (j=0; j<bA->nc; j++) {
618: VecDestroy(&R[j]);
619: }
620: PetscFree(R);
621: }
623: if (left) {
624: /* allocate L */
625: PetscMalloc1(bA->nr, &L);
626: /* Create the left vectors */
627: for (i=0; i<bA->nr; i++) {
628: for (j=0; j<bA->nc; j++) {
629: if (bA->m[i][j]) {
630: MatCreateVecs(bA->m[i][j],NULL,&L[i]);
631: break;
632: }
633: }
634: if (j==bA->nc) SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Mat(Nest) contains a null row.");
635: }
637: VecCreateNest(comm,bA->nr,bA->isglobal.row,L,left);
638: for (i=0; i<bA->nr; i++) {
639: VecDestroy(&L[i]);
640: }
642: PetscFree(L);
643: }
644: return(0);
645: }
647: static PetscErrorCode MatView_Nest(Mat A,PetscViewer viewer)
648: {
649: Mat_Nest *bA = (Mat_Nest*)A->data;
650: PetscBool isascii;
651: PetscInt i,j;
655: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
656: if (isascii) {
658: PetscViewerASCIIPrintf(viewer,"Matrix object: \n");
659: PetscViewerASCIIPushTab(viewer);
660: PetscViewerASCIIPrintf(viewer, "type=nest, rows=%D, cols=%D \n",bA->nr,bA->nc);
662: PetscViewerASCIIPrintf(viewer,"MatNest structure: \n");
663: for (i=0; i<bA->nr; i++) {
664: for (j=0; j<bA->nc; j++) {
665: MatType type;
666: char name[256] = "",prefix[256] = "";
667: PetscInt NR,NC;
668: PetscBool isNest = PETSC_FALSE;
670: if (!bA->m[i][j]) {
671: PetscViewerASCIIPrintf(viewer, "(%D,%D) : NULL \n",i,j);
672: continue;
673: }
674: MatGetSize(bA->m[i][j],&NR,&NC);
675: MatGetType(bA->m[i][j], &type);
676: if (((PetscObject)bA->m[i][j])->name) {PetscSNPrintf(name,sizeof(name),"name=\"%s\", ",((PetscObject)bA->m[i][j])->name);}
677: if (((PetscObject)bA->m[i][j])->prefix) {PetscSNPrintf(prefix,sizeof(prefix),"prefix=\"%s\", ",((PetscObject)bA->m[i][j])->prefix);}
678: PetscObjectTypeCompare((PetscObject)bA->m[i][j],MATNEST,&isNest);
680: PetscViewerASCIIPrintf(viewer,"(%D,%D) : %s%stype=%s, rows=%D, cols=%D \n",i,j,name,prefix,type,NR,NC);
682: if (isNest) {
683: PetscViewerASCIIPushTab(viewer); /* push1 */
684: MatView(bA->m[i][j],viewer);
685: PetscViewerASCIIPopTab(viewer); /* pop1 */
686: }
687: }
688: }
689: PetscViewerASCIIPopTab(viewer); /* pop0 */
690: }
691: return(0);
692: }
694: static PetscErrorCode MatZeroEntries_Nest(Mat A)
695: {
696: Mat_Nest *bA = (Mat_Nest*)A->data;
697: PetscInt i,j;
701: for (i=0; i<bA->nr; i++) {
702: for (j=0; j<bA->nc; j++) {
703: if (!bA->m[i][j]) continue;
704: MatZeroEntries(bA->m[i][j]);
705: }
706: }
707: return(0);
708: }
710: static PetscErrorCode MatCopy_Nest(Mat A,Mat B,MatStructure str)
711: {
712: Mat_Nest *bA = (Mat_Nest*)A->data,*bB = (Mat_Nest*)B->data;
713: PetscInt i,j,nr = bA->nr,nc = bA->nc;
717: if (nr != bB->nr || nc != bB->nc) SETERRQ4(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot copy a Mat_Nest of block size (%D,%D) to a Mat_Nest of block size (%D,%D)",bB->nr,bB->nc,nr,nc);
718: for (i=0; i<nr; i++) {
719: for (j=0; j<nc; j++) {
720: if (bA->m[i][j] && bB->m[i][j]) {
721: MatCopy(bA->m[i][j],bB->m[i][j],str);
722: } else if (bA->m[i][j] || bB->m[i][j]) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Matrix block does not exist at %D,%D",i,j);
723: }
724: }
725: PetscObjectStateIncrease((PetscObject)B);
726: return(0);
727: }
729: static PetscErrorCode MatDuplicate_Nest(Mat A,MatDuplicateOption op,Mat *B)
730: {
731: Mat_Nest *bA = (Mat_Nest*)A->data;
732: Mat *b;
733: PetscInt i,j,nr = bA->nr,nc = bA->nc;
737: PetscMalloc1(nr*nc,&b);
738: for (i=0; i<nr; i++) {
739: for (j=0; j<nc; j++) {
740: if (bA->m[i][j]) {
741: MatDuplicate(bA->m[i][j],op,&b[i*nc+j]);
742: } else {
743: b[i*nc+j] = NULL;
744: }
745: }
746: }
747: MatCreateNest(PetscObjectComm((PetscObject)A),nr,bA->isglobal.row,nc,bA->isglobal.col,b,B);
748: /* Give the new MatNest exclusive ownership */
749: for (i=0; i<nr*nc; i++) {
750: MatDestroy(&b[i]);
751: }
752: PetscFree(b);
754: MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
755: MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
756: return(0);
757: }
759: /* nest api */
760: PetscErrorCode MatNestGetSubMat_Nest(Mat A,PetscInt idxm,PetscInt jdxm,Mat *mat)
761: {
762: Mat_Nest *bA = (Mat_Nest*)A->data;
765: if (idxm >= bA->nr) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm,bA->nr-1);
766: if (jdxm >= bA->nc) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_OUTOFRANGE,"Col too large: col %D max %D",jdxm,bA->nc-1);
767: *mat = bA->m[idxm][jdxm];
768: return(0);
769: }
771: /*@
772: MatNestGetSubMat - Returns a single, sub-matrix from a nest matrix.
774: Not collective
776: Input Parameters:
777: + A - nest matrix
778: . idxm - index of the matrix within the nest matrix
779: - jdxm - index of the matrix within the nest matrix
781: Output Parameter:
782: . sub - matrix at index idxm,jdxm within the nest matrix
784: Level: developer
786: .seealso: MatNestGetSize(), MatNestGetSubMats()
787: @*/
788: PetscErrorCode MatNestGetSubMat(Mat A,PetscInt idxm,PetscInt jdxm,Mat *sub)
789: {
793: PetscUseMethod(A,"MatNestGetSubMat_C",(Mat,PetscInt,PetscInt,Mat*),(A,idxm,jdxm,sub));
794: return(0);
795: }
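/*
   A minimal usage sketch for MatNestGetSubMat (illustrative, not part of the PETSc source;
   "S" is assumed to be an assembled MATNEST created elsewhere, error checking omitted):

      Mat A01;
      MatNestGetSubMat(S,0,1,&A01);   - borrowed reference: the block may be NULL and
                                        must not be destroyed by the caller
*/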
797: PetscErrorCode MatNestSetSubMat_Nest(Mat A,PetscInt idxm,PetscInt jdxm,Mat mat)
798: {
799: Mat_Nest *bA = (Mat_Nest*)A->data;
800: PetscInt m,n,M,N,mi,ni,Mi,Ni;
804: if (idxm >= bA->nr) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm,bA->nr-1);
805: if (jdxm >= bA->nc) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_OUTOFRANGE,"Col too large: col %D max %D",jdxm,bA->nc-1);
806: MatGetLocalSize(mat,&m,&n);
807: MatGetSize(mat,&M,&N);
808: ISGetLocalSize(bA->isglobal.row[idxm],&mi);
809: ISGetSize(bA->isglobal.row[idxm],&Mi);
810: ISGetLocalSize(bA->isglobal.col[jdxm],&ni);
811: ISGetSize(bA->isglobal.col[jdxm],&Ni);
812: if (M != Mi || N != Ni) SETERRQ4(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_INCOMP,"Submatrix dimension (%D,%D) incompatible with nest block (%D,%D)",M,N,Mi,Ni);
813: if (m != mi || n != ni) SETERRQ4(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_INCOMP,"Submatrix local dimension (%D,%D) incompatible with nest block (%D,%D)",m,n,mi,ni);
815: PetscObjectReference((PetscObject)mat);
816: MatDestroy(&bA->m[idxm][jdxm]);
817: bA->m[idxm][jdxm] = mat;
818: return(0);
819: }
821: /*@
822: MatNestSetSubMat - Set a single submatrix in the nest matrix.
824: Logically collective on the submatrix communicator
826: Input Parameters:
827: + A - nest matrix
828: . idxm - index of the matrix within the nest matrix
829: . jdxm - index of the matrix within the nest matrix
830: - sub - matrix at index idxm,jdxm within the nest matrix
832: Notes:
833: The new submatrix must have the same size and communicator as that block of the nest.
835: This increments the reference count of the submatrix.
837: Level: developer
839: .seealso: MatNestSetSubMats(), MatNestGetSubMats()
840: @*/
841: PetscErrorCode MatNestSetSubMat(Mat A,PetscInt idxm,PetscInt jdxm,Mat sub)
842: {
846: PetscUseMethod(A,"MatNestSetSubMat_C",(Mat,PetscInt,PetscInt,Mat),(A,idxm,jdxm,sub));
847: return(0);
848: }
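/*
   A minimal usage sketch for MatNestSetSubMat (illustrative, not part of the PETSc source;
   "S" is assumed to be a MATNEST and "B11" a matrix whose global and local sizes match
   block (1,1) of S, error checking omitted):

      MatNestSetSubMat(S,1,1,B11);    - S takes its own reference on B11
      MatDestroy(&B11);               - drop the caller's reference if it is no longer needed
*/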
850: PetscErrorCode MatNestGetSubMats_Nest(Mat A,PetscInt *M,PetscInt *N,Mat ***mat)
851: {
852: Mat_Nest *bA = (Mat_Nest*)A->data;
855: if (M) *M = bA->nr;
856: if (N) *N = bA->nc;
857: if (mat) *mat = bA->m;
858: return(0);
859: }
861: /*@C
862: MatNestGetSubMats - Returns the entire two dimensional array of matrices defining a nest matrix.
864: Not collective
866: Input Parameters:
867: . A - nest matrix
869: Output Parameters:
870: + M - number of rows in the nest matrix
871: . N - number of cols in the nest matrix
872: - mat - 2d array of matrices
874: Notes:
876: The user should not free the array mat.
878: In Fortran, this routine has a calling sequence
879: $ call MatNestGetSubMats(A, M, N, mat, ierr)
880: where the space allocated for the optional argument mat is assumed large enough (if provided).
882: Level: developer
884: .seealso: MatNestGetSize(), MatNestGetSubMat()
885: @*/
886: PetscErrorCode MatNestGetSubMats(Mat A,PetscInt *M,PetscInt *N,Mat ***mat)
887: {
891: PetscUseMethod(A,"MatNestGetSubMats_C",(Mat,PetscInt*,PetscInt*,Mat***),(A,M,N,mat));
892: return(0);
893: }
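/*
   A minimal usage sketch for MatNestGetSubMats (illustrative, not part of the PETSc source;
   "S" is assumed to be a MATNEST, error checking omitted):

      PetscInt nr,nc,i,j;
      Mat      **sub;
      MatNestGetSubMats(S,&nr,&nc,&sub);
      for (i=0; i<nr; i++)
        for (j=0; j<nc; j++)
          if (sub[i][j]) { ... inspect block (i,j) ... }
      - do not free sub; it points at the internal storage of S
*/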
895: PetscErrorCode MatNestGetSize_Nest(Mat A,PetscInt *M,PetscInt *N)
896: {
897: Mat_Nest *bA = (Mat_Nest*)A->data;
900: if (M) *M = bA->nr;
901: if (N) *N = bA->nc;
902: return(0);
903: }
905: /*@
906: MatNestGetSize - Returns the size of the nest matrix.
908: Not collective
910: Input Parameters:
911: . A - nest matrix
913: Output Parameters:
914: + M - number of rows in the nested mat
915: - N - number of cols in the nested mat
917: Notes:
919: Level: developer
921: .seealso: MatNestGetSubMat(), MatNestGetSubMats()
922: @*/
923: PetscErrorCode MatNestGetSize(Mat A,PetscInt *M,PetscInt *N)
924: {
928: PetscUseMethod(A,"MatNestGetSize_C",(Mat,PetscInt*,PetscInt*),(A,M,N));
929: return(0);
930: }
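/*
   A minimal usage sketch for MatNestGetSize (illustrative, not part of the PETSc source;
   "S" is assumed to be a MATNEST):

      PetscInt nr,nc;
      MatNestGetSize(S,&nr,&nc);      - nr,nc are block counts, not row/column dimensions
*/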
932: static PetscErrorCode MatNestGetISs_Nest(Mat A,IS rows[],IS cols[])
933: {
934: Mat_Nest *vs = (Mat_Nest*)A->data;
935: PetscInt i;
938: if (rows) for (i=0; i<vs->nr; i++) rows[i] = vs->isglobal.row[i];
939: if (cols) for (i=0; i<vs->nc; i++) cols[i] = vs->isglobal.col[i];
940: return(0);
941: }
943: /*@C
944: MatNestGetISs - Returns the index sets partitioning the row and column spaces
946: Not collective
948: Input Parameters:
949: . A - nest matrix
951: Output Parameters:
952: + rows - array of row index sets
953: - cols - array of column index sets
955: Level: advanced
957: Notes:
958: The user must have allocated arrays of the correct size. The reference count is not increased on the returned ISs.
960: .seealso: MatNestGetSubMat(), MatNestGetSubMats(), MatNestGetSize(), MatNestGetLocalISs()
961: @*/
962: PetscErrorCode MatNestGetISs(Mat A,IS rows[],IS cols[])
963: {
968: PetscUseMethod(A,"MatNestGetISs_C",(Mat,IS[],IS[]),(A,rows,cols));
969: return(0);
970: }
972: static PetscErrorCode MatNestGetLocalISs_Nest(Mat A,IS rows[],IS cols[])
973: {
974: Mat_Nest *vs = (Mat_Nest*)A->data;
975: PetscInt i;
978: if (rows) for (i=0; i<vs->nr; i++) rows[i] = vs->islocal.row[i];
979: if (cols) for (i=0; i<vs->nc; i++) cols[i] = vs->islocal.col[i];
980: return(0);
981: }
983: /*@C
984: MatNestGetLocalISs - Returns the index sets partitioning the row and column spaces
986: Not collective
988: Input Parameters:
989: . A - nest matrix
991: Output Parameters:
992: + rows - array of row index sets (or NULL to ignore)
993: - cols - array of column index sets (or NULL to ignore)
995: Level: advanced
997: Notes:
998: The user must have allocated arrays of the correct size. The reference count is not increased on the returned ISs.
1000: .seealso: MatNestGetSubMat(), MatNestGetSubMats(), MatNestGetSize(), MatNestGetISs()
1001: @*/
1002: PetscErrorCode MatNestGetLocalISs(Mat A,IS rows[],IS cols[])
1003: {
1008: PetscUseMethod(A,"MatNestGetLocalISs_C",(Mat,IS[],IS[]),(A,rows,cols));
1009: return(0);
1010: }
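/*
   A minimal usage sketch for MatNestGetLocalISs (illustrative, not part of the PETSc source;
   "S" is assumed to be a MATNEST whose blocks carry local-to-global maps, error checking
   omitted):

      PetscInt nr,nc;
      IS       *lrows,*lcols;
      MatNestGetSize(S,&nr,&nc);
      PetscMalloc2(nr,&lrows,nc,&lcols);
      MatNestGetLocalISs(S,lrows,lcols);   - entries may be NULL when no local map is available
      PetscFree2(lrows,lcols);
*/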
1012: PetscErrorCode MatNestSetVecType_Nest(Mat A,VecType vtype)
1013: {
1015: PetscBool flg;
1018: PetscStrcmp(vtype,VECNEST,&flg);
1019: /* In reality, this only distinguishes VECNEST and "other" */
1020: if (flg) A->ops->getvecs = MatCreateVecs_Nest;
1021: else A->ops->getvecs = (PetscErrorCode (*)(Mat,Vec*,Vec*)) 0;
1022: return(0);
1023: }
1025: /*@C
1026: MatNestSetVecType - Sets the type of Vec returned by MatCreateVecs()
1028: Not collective
1030: Input Parameters:
1031: + A - nest matrix
1032: - vtype - type to use for creating vectors
1034: Notes:
1036: Level: developer
1038: .seealso: MatCreateVecs()
1039: @*/
1040: PetscErrorCode MatNestSetVecType(Mat A,VecType vtype)
1041: {
1045: PetscTryMethod(A,"MatNestSetVecType_C",(Mat,VecType),(A,vtype));
1046: return(0);
1047: }
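/*
   A minimal usage sketch for MatNestSetVecType (illustrative, not part of the PETSc source;
   "S" is assumed to be a MATNEST, error checking omitted):

      Vec x,y;
      MatNestSetVecType(S,VECNEST);   - subsequent MatCreateVecs() calls return VECNEST vectors
      MatCreateVecs(S,&x,&y);
*/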
1049: PetscErrorCode MatNestSetSubMats_Nest(Mat A,PetscInt nr,const IS is_row[],PetscInt nc,const IS is_col[],const Mat a[])
1050: {
1051: Mat_Nest *s = (Mat_Nest*)A->data;
1052: PetscInt i,j,m,n,M,N;
1056: s->nr = nr;
1057: s->nc = nc;
1059: /* Create space for submatrices */
1060: PetscMalloc1(nr,&s->m);
1061: for (i=0; i<nr; i++) {
1062: PetscMalloc1(nc,&s->m[i]);
1063: }
1064: for (i=0; i<nr; i++) {
1065: for (j=0; j<nc; j++) {
1066: s->m[i][j] = a[i*nc+j];
1067: if (a[i*nc+j]) {
1068: PetscObjectReference((PetscObject)a[i*nc+j]);
1069: }
1070: }
1071: }
1073: MatSetUp_NestIS_Private(A,nr,is_row,nc,is_col);
1075: PetscMalloc1(nr,&s->row_len);
1076: PetscMalloc1(nc,&s->col_len);
1077: for (i=0; i<nr; i++) s->row_len[i]=-1;
1078: for (j=0; j<nc; j++) s->col_len[j]=-1;
1080: MatNestGetSizes_Private(A,&m,&n,&M,&N);
1082: PetscLayoutSetSize(A->rmap,M);
1083: PetscLayoutSetLocalSize(A->rmap,m);
1084: PetscLayoutSetSize(A->cmap,N);
1085: PetscLayoutSetLocalSize(A->cmap,n);
1087: PetscLayoutSetUp(A->rmap);
1088: PetscLayoutSetUp(A->cmap);
1090: PetscCalloc2(nr,&s->left,nc,&s->right);
1091: return(0);
1092: }
1094: /*@
1095: MatNestSetSubMats - Sets the nested submatrices
1097: Collective on Mat
1099: Input Parameters:
1100: + A - nested matrix
1101: . nr - number of nested row blocks
1102: . is_row - index sets for each nested row block, or NULL to make contiguous
1103: . nc - number of nested column blocks
1104: . is_col - index sets for each nested column block, or NULL to make contiguous
1105: - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using NULL
1107: Level: advanced
1109: .seealso: MatCreateNest(), MATNEST
1110: @*/
1111: PetscErrorCode MatNestSetSubMats(Mat A,PetscInt nr,const IS is_row[],PetscInt nc,const IS is_col[],const Mat a[])
1112: {
1114: PetscInt i,nr_nc;
1118: if (nr < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_OUTOFRANGE,"Number of rows cannot be negative");
1119: if (nr && is_row) {
1122: }
1123: if (nc < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_OUTOFRANGE,"Number of columns cannot be negative");
1124: if (nc && is_col) {
1127: }
1128: nr_nc=nr*nc;
1130: PetscUseMethod(A,"MatNestSetSubMats_C",(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]),(A,nr,is_row,nc,is_col,a));
1131: return(0);
1132: }
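/*
   A minimal usage sketch for MatNestSetSubMats (illustrative, not part of the PETSc source;
   "A00", "A01", "A10" are assumed to be assembled matrices with compatible layouts, error
   checking omitted):

      Mat S,blocks[4] = {A00,A01,A10,NULL};        - row-aligned; NULL marks an empty block
      MatCreate(PetscObjectComm((PetscObject)A00),&S);
      MatSetType(S,MATNEST);
      MatNestSetSubMats(S,2,NULL,2,NULL,blocks);   - NULL ISs: contiguous row/column blocks
      MatAssemblyBegin(S,MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(S,MAT_FINAL_ASSEMBLY);
*/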
1134: static PetscErrorCode MatNestCreateAggregateL2G_Private(Mat A,PetscInt n,const IS islocal[],const IS isglobal[],PetscBool colflg,ISLocalToGlobalMapping *ltog)
1135: {
1137: PetscBool flg;
1138: PetscInt i,j,m,mi,*ix;
1141: for (i=0,m=0,flg=PETSC_FALSE; i<n; i++) {
1142: if (islocal[i]) {
1143: ISGetSize(islocal[i],&mi);
1144: flg = PETSC_TRUE; /* We found a non-trivial entry */
1145: } else {
1146: ISGetSize(isglobal[i],&mi);
1147: }
1148: m += mi;
1149: }
1150: if (flg) {
1151: PetscMalloc1(m,&ix);
1152: for (i=0,m=0; i<n; i++) {
1153: ISLocalToGlobalMapping smap = NULL;
1154: VecScatter scat;
1155: IS isreq;
1156: Vec lvec,gvec;
1157: union {char padding[sizeof(PetscScalar)]; PetscInt integer;} *x;
1158: Mat sub;
1160: if (sizeof(*x) != sizeof(PetscScalar)) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support when scalars smaller than integers");
1161: if (colflg) {
1162: MatNestFindNonzeroSubMatRow(A,i,&sub);
1163: } else {
1164: MatNestFindNonzeroSubMatCol(A,i,&sub);
1165: }
1166: if (sub) {MatGetLocalToGlobalMapping(sub,&smap,NULL);}
1167: if (islocal[i]) {
1168: ISGetSize(islocal[i],&mi);
1169: } else {
1170: ISGetSize(isglobal[i],&mi);
1171: }
1172: for (j=0; j<mi; j++) ix[m+j] = j;
1173: if (smap) {ISLocalToGlobalMappingApply(smap,mi,ix+m,ix+m);}
1174: /*
1175: Now we need to extract the monolithic global indices that correspond to the given split global indices.
1176: In many/most cases, we only want MatGetLocalSubMatrix() to work, in which case we only need to know the size of the local spaces.
1177: The approach here is ugly because it uses VecScatter to move indices.
1178: */
1179: VecCreateSeq(PETSC_COMM_SELF,mi,&lvec);
1180: VecCreateMPI(((PetscObject)isglobal[i])->comm,mi,PETSC_DECIDE,&gvec);
1181: ISCreateGeneral(((PetscObject)isglobal[i])->comm,mi,ix+m,PETSC_COPY_VALUES,&isreq);
1182: VecScatterCreate(gvec,isreq,lvec,NULL,&scat);
1183: VecGetArray(gvec,(PetscScalar**)&x);
1184: for (j=0; j<mi; j++) x[j].integer = ix[m+j];
1185: VecRestoreArray(gvec,(PetscScalar**)&x);
1186: VecScatterBegin(scat,gvec,lvec,INSERT_VALUES,SCATTER_FORWARD);
1187: VecScatterEnd(scat,gvec,lvec,INSERT_VALUES,SCATTER_FORWARD);
1188: VecGetArray(lvec,(PetscScalar**)&x);
1189: for (j=0; j<mi; j++) ix[m+j] = x[j].integer;
1190: VecRestoreArray(lvec,(PetscScalar**)&x);
1191: VecDestroy(&lvec);
1192: VecDestroy(&gvec);
1193: ISDestroy(&isreq);
1194: VecScatterDestroy(&scat);
1195: m += mi;
1196: }
1197: ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A),1,m,ix,PETSC_OWN_POINTER,ltog);
1198: } else {
1199: *ltog = NULL;
1200: }
1201: return(0);
1202: }
1205: /* If an IS was provided, there is nothing Nest needs to do, otherwise Nest will build a strided IS */
1206: /*
1207: number of processors = NP
1208: Nest x^T = ((g_0,g_1,...,g_NP-1), (h_0,h_1,...,h_NP-1))
1209: proc 0:    => (g_0,h_0)
1210: proc 1:    => (g_1,h_1)
1211: ...
1212: proc NP-1: => (g_NP-1,h_NP-1)
1214: proc 0:                            proc 1:                  proc NP-1:
1215: is[0] = (0,1,2,...,nlocal(g_0)-1)  (0,1,...,nlocal(g_1)-1)  (0,1,...,nlocal(g_NP-1)-1)
1217: proc 0:
1218: is[1] = (nlocal(g_0),nlocal(g_0)+1,...,nlocal(g_0)+nlocal(h_0)-1)
1219: proc 1:
1220: is[1] = (nlocal(g_1),nlocal(g_1)+1,...,nlocal(g_1)+nlocal(h_1)-1)
1222: proc NP-1:
1223: is[1] = (nlocal(g_NP-1),nlocal(g_NP-1)+1,...,nlocal(g_NP-1)+nlocal(h_NP-1)-1)
1224: */
1225: static PetscErrorCode MatSetUp_NestIS_Private(Mat A,PetscInt nr,const IS is_row[],PetscInt nc,const IS is_col[])
1226: {
1227: Mat_Nest *vs = (Mat_Nest*)A->data;
1228: PetscInt i,j,offset,n,nsum,bs;
1230: Mat sub = NULL;
1233: PetscMalloc1(nr,&vs->isglobal.row);
1234: PetscMalloc1(nc,&vs->isglobal.col);
1235: if (is_row) { /* valid IS is passed in */
1236: /* refs on is[] are incremented */
1237: for (i=0; i<vs->nr; i++) {
1238: PetscObjectReference((PetscObject)is_row[i]);
1240: vs->isglobal.row[i] = is_row[i];
1241: }
1242: } else { /* Create the ISs by inspecting sizes of a submatrix in each row */
1243: nsum = 0;
1244: for (i=0; i<vs->nr; i++) { /* Add up the local sizes to compute the aggregate offset */
1245: MatNestFindNonzeroSubMatRow(A,i,&sub);
1246: if (!sub) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"No nonzero submatrix in row %D",i);
1247: MatGetLocalSize(sub,&n,NULL);
1248: if (n < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Sizes have not yet been set for submatrix");
1249: nsum += n;
1250: }
1251: MPI_Scan(&nsum,&offset,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));
1252: offset -= nsum;
1253: for (i=0; i<vs->nr; i++) {
1254: MatNestFindNonzeroSubMatRow(A,i,&sub);
1255: MatGetLocalSize(sub,&n,NULL);
1256: MatGetBlockSize(sub,&bs);
1257: ISCreateStride(PetscObjectComm((PetscObject)sub),n,offset,1,&vs->isglobal.row[i]);
1258: ISSetBlockSize(vs->isglobal.row[i],bs);
1259: offset += n;
1260: }
1261: }
1263: if (is_col) { /* valid IS is passed in */
1264: /* refs on is[] are incremented */
1265: for (j=0; j<vs->nc; j++) {
1266: PetscObjectReference((PetscObject)is_col[j]);
1268: vs->isglobal.col[j] = is_col[j];
1269: }
1270: } else { /* Create the ISs by inspecting sizes of a submatrix in each column */
1271: offset = A->cmap->rstart;
1272: nsum = 0;
1273: for (j=0; j<vs->nc; j++) {
1274: MatNestFindNonzeroSubMatCol(A,j,&sub);
1275: if (!sub) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"No nonzero submatrix in column %D",j);
1276: MatGetLocalSize(sub,NULL,&n);
1277: if (n < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Sizes have not yet been set for submatrix");
1278: nsum += n;
1279: }
1280: MPI_Scan(&nsum,&offset,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));
1281: offset -= nsum;
1282: for (j=0; j<vs->nc; j++) {
1283: MatNestFindNonzeroSubMatCol(A,j,&sub);
1284: MatGetLocalSize(sub,NULL,&n);
1285: MatGetBlockSize(sub,&bs);
1286: ISCreateStride(PetscObjectComm((PetscObject)sub),n,offset,1,&vs->isglobal.col[j]);
1287: ISSetBlockSize(vs->isglobal.col[j],bs);
1288: offset += n;
1289: }
1290: }
1292: /* Set up the local ISs */
1293: PetscMalloc1(vs->nr,&vs->islocal.row);
1294: PetscMalloc1(vs->nc,&vs->islocal.col);
1295: for (i=0,offset=0; i<vs->nr; i++) {
1296: IS isloc;
1297: ISLocalToGlobalMapping rmap = NULL;
1298: PetscInt nlocal,bs;
1299: MatNestFindNonzeroSubMatRow(A,i,&sub);
1300: if (sub) {MatGetLocalToGlobalMapping(sub,&rmap,NULL);}
1301: if (rmap) {
1302: MatGetBlockSize(sub,&bs);
1303: ISLocalToGlobalMappingGetSize(rmap,&nlocal);
1304: ISCreateStride(PETSC_COMM_SELF,nlocal,offset,1,&isloc);
1305: ISSetBlockSize(isloc,bs);
1306: } else {
1307: nlocal = 0;
1308: isloc = NULL;
1309: }
1310: vs->islocal.row[i] = isloc;
1311: offset += nlocal;
1312: }
1313: for (i=0,offset=0; i<vs->nc; i++) {
1314: IS isloc;
1315: ISLocalToGlobalMapping cmap = NULL;
1316: PetscInt nlocal,bs;
1317: MatNestFindNonzeroSubMatCol(A,i,&sub);
1318: if (sub) {MatGetLocalToGlobalMapping(sub,NULL,&cmap);}
1319: if (cmap) {
1320: MatGetBlockSize(sub,&bs);
1321: ISLocalToGlobalMappingGetSize(cmap,&nlocal);
1322: ISCreateStride(PETSC_COMM_SELF,nlocal,offset,1,&isloc);
1323: ISSetBlockSize(isloc,bs);
1324: } else {
1325: nlocal = 0;
1326: isloc = NULL;
1327: }
1328: vs->islocal.col[i] = isloc;
1329: offset += nlocal;
1330: }
1332: /* Set up the aggregate ISLocalToGlobalMapping */
1333: {
1334: ISLocalToGlobalMapping rmap,cmap;
1335: MatNestCreateAggregateL2G_Private(A,vs->nr,vs->islocal.row,vs->isglobal.row,PETSC_FALSE,&rmap);
1336: MatNestCreateAggregateL2G_Private(A,vs->nc,vs->islocal.col,vs->isglobal.col,PETSC_TRUE,&cmap);
1337: if (rmap && cmap) {MatSetLocalToGlobalMapping(A,rmap,cmap);}
1338: ISLocalToGlobalMappingDestroy(&rmap);
1339: ISLocalToGlobalMappingDestroy(&cmap);
1340: }
1342: #if defined(PETSC_USE_DEBUG)
1343: for (i=0; i<vs->nr; i++) {
1344: for (j=0; j<vs->nc; j++) {
1345: PetscInt m,n,M,N,mi,ni,Mi,Ni;
1346: Mat B = vs->m[i][j];
1347: if (!B) continue;
1348: MatGetSize(B,&M,&N);
1349: MatGetLocalSize(B,&m,&n);
1350: ISGetSize(vs->isglobal.row[i],&Mi);
1351: ISGetSize(vs->isglobal.col[j],&Ni);
1352: ISGetLocalSize(vs->isglobal.row[i],&mi);
1353: ISGetLocalSize(vs->isglobal.col[j],&ni);
1354: if (M != Mi || N != Ni) SETERRQ6(PetscObjectComm((PetscObject)sub),PETSC_ERR_ARG_INCOMP,"Global sizes (%D,%D) of nested submatrix (%D,%D) do not agree with space defined by index sets (%D,%D)",M,N,i,j,Mi,Ni);
1355: if (m != mi || n != ni) SETERRQ6(PetscObjectComm((PetscObject)sub),PETSC_ERR_ARG_INCOMP,"Local sizes (%D,%D) of nested submatrix (%D,%D) do not agree with space defined by index sets (%D,%D)",m,n,i,j,mi,ni);
1356: }
1357: }
1358: #endif
1360: /* Set A->assembled if all non-null blocks are currently assembled */
1361: for (i=0; i<vs->nr; i++) {
1362: for (j=0; j<vs->nc; j++) {
1363: if (vs->m[i][j] && !vs->m[i][j]->assembled) return(0);
1364: }
1365: }
1366: A->assembled = PETSC_TRUE;
1367: return(0);
1368: }
1370: /*@C
1371: MatCreateNest - Creates a new matrix containing several nested submatrices, each stored separately
1373: Collective on Mat
1375: Input Parameters:
1376: + comm - Communicator for the new Mat
1377: . nr - number of nested row blocks
1378: . is_row - index sets for each nested row block, or NULL to make contiguous
1379: . nc - number of nested column blocks
1380: . is_col - index sets for each nested column block, or NULL to make contiguous
1381: - a - row-aligned array of nr*nc submatrices, empty submatrices can be passed using NULL
1383: Output Parameter:
1384: . B - new matrix
1386: Level: advanced
1388: .seealso: MatCreate(), VecCreateNest(), DMCreateMatrix(), MATNEST
1389: @*/
1390: PetscErrorCode MatCreateNest(MPI_Comm comm,PetscInt nr,const IS is_row[],PetscInt nc,const IS is_col[],const Mat a[],Mat *B)
1391: {
1392: Mat A;
1396: *B = 0;
1397: MatCreate(comm,&A);
1398: MatSetType(A,MATNEST);
1399: A->preallocated = PETSC_TRUE;
1400: MatNestSetSubMats(A,nr,is_row,nc,is_col,a);
1401: *B = A;
1402: return(0);
1403: }
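/*
   A minimal usage sketch for MatCreateNest (illustrative, not part of the PETSc source;
   "A" and "B" are assumed to be assembled matrices forming the 2x2 saddle-point system
   [A B^T; B 0], error checking omitted):

      Mat S,Bt,blocks[4];
      MatCreateTranspose(B,&Bt);                   - implicit transpose, no copy
      blocks[0] = A;  blocks[1] = Bt;
      blocks[2] = B;  blocks[3] = NULL;            - empty (1,1) block
      MatCreateNest(PetscObjectComm((PetscObject)A),2,NULL,2,NULL,blocks,&S);
      MatDestroy(&Bt);                             - S holds its own references to the blocks
*/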
1405: static PetscErrorCode MatConvert_Nest_SeqAIJ_fast(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
1406: {
1407: Mat_Nest *nest = (Mat_Nest*)A->data;
1408: Mat *trans;
1409: PetscScalar **avv;
1410: PetscScalar *vv;
1411: PetscInt **aii,**ajj;
1412: PetscInt *ii,*jj,*ci;
1413: PetscInt nr,nc,nnz,i,j;
1414: PetscBool done;
1418: MatGetSize(A,&nr,&nc);
1419: if (reuse == MAT_REUSE_MATRIX) {
1420: PetscInt rnr;
1422: MatGetRowIJ(*newmat,0,PETSC_FALSE,PETSC_FALSE,&rnr,(const PetscInt**)&ii,(const PetscInt**)&jj,&done);
1423: if (!done) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"MatGetRowIJ");
1424: if (rnr != nr) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"Cannot reuse matrix, wrong number of rows");
1425: MatSeqAIJGetArray(*newmat,&vv);
1426: }
1427: /* extract CSR for nested SeqAIJ matrices */
1428: nnz = 0;
1429: PetscCalloc4(nest->nr*nest->nc,&aii,nest->nr*nest->nc,&ajj,nest->nr*nest->nc,&avv,nest->nr*nest->nc,&trans);
1430: for (i=0; i<nest->nr; ++i) {
1431: for (j=0; j<nest->nc; ++j) {
1432: Mat B = nest->m[i][j];
1433: if (B) {
1434: PetscScalar *naa;
1435: PetscInt *nii,*njj,nnr;
1436: PetscBool istrans;
1438: PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&istrans);
1439: if (istrans) {
1440: Mat Bt;
1442: MatTransposeGetMat(B,&Bt);
1443: MatTranspose(Bt,MAT_INITIAL_MATRIX,&trans[i*nest->nc+j]);
1444: B = trans[i*nest->nc+j];
1445: }
1446: MatGetRowIJ(B,0,PETSC_FALSE,PETSC_FALSE,&nnr,(const PetscInt**)&nii,(const PetscInt**)&njj,&done);
1447: if (!done) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_PLIB,"MatGetRowIJ");
1448: MatSeqAIJGetArray(B,&naa);
1449: nnz += nii[nnr];
1451: aii[i*nest->nc+j] = nii;
1452: ajj[i*nest->nc+j] = njj;
1453: avv[i*nest->nc+j] = naa;
1454: }
1455: }
1456: }
1457: if (reuse != MAT_REUSE_MATRIX) {
1458: PetscMalloc1(nr+1,&ii);
1459: PetscMalloc1(nnz,&jj);
1460: PetscMalloc1(nnz,&vv);
1461: } else {
1462: if (nnz != ii[nr]) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"Cannot reuse matrix, wrong number of nonzeros");
1463: }
1465: /* new row pointer */
1466: PetscMemzero(ii,(nr+1)*sizeof(PetscInt));
1467: for (i=0; i<nest->nr; ++i) {
1468: PetscInt ncr,rst;
1470: ISStrideGetInfo(nest->isglobal.row[i],&rst,NULL);
1471: ISGetLocalSize(nest->isglobal.row[i],&ncr);
1472: for (j=0; j<nest->nc; ++j) {
1473: if (aii[i*nest->nc+j]) {
1474: PetscInt *nii = aii[i*nest->nc+j];
1475: PetscInt ir;
1477: for (ir=rst; ir<ncr+rst; ++ir) {
1478: ii[ir+1] += nii[1]-nii[0];
1479: nii++;
1480: }
1481: }
1482: }
1483: }
1484: for (i=0; i<nr; i++) ii[i+1] += ii[i];
1486: /* construct CSR for the new matrix */
1487: PetscCalloc1(nr,&ci);
1488: for (i=0; i<nest->nr; ++i) {
1489: PetscInt ncr,rst;
1491: ISStrideGetInfo(nest->isglobal.row[i],&rst,NULL);
1492: ISGetLocalSize(nest->isglobal.row[i],&ncr);
1493: for (j=0; j<nest->nc; ++j) {
1494: if (aii[i*nest->nc+j]) {
1495: PetscScalar *nvv = avv[i*nest->nc+j];
1496: PetscInt *nii = aii[i*nest->nc+j];
1497: PetscInt *njj = ajj[i*nest->nc+j];
1498: PetscInt ir,cst;
1500: ISStrideGetInfo(nest->isglobal.col[j],&cst,NULL);
1501: for (ir=rst; ir<ncr+rst; ++ir) {
1502: PetscInt ij,rsize = nii[1]-nii[0],ist = ii[ir]+ci[ir];
1504: for (ij=0;ij<rsize;ij++) {
1505: jj[ist+ij] = *njj+cst;
1506: vv[ist+ij] = *nvv;
1507: njj++;
1508: nvv++;
1509: }
1510: ci[ir] += rsize;
1511: nii++;
1512: }
1513: }
1514: }
1515: }
1516: PetscFree(ci);
1518: /* restore info */
1519: for (i=0; i<nest->nr; ++i) {
1520: for (j=0; j<nest->nc; ++j) {
1521: Mat B = nest->m[i][j];
1522: if (B) {
1523: PetscInt nnr = 0, k = i*nest->nc+j;
1525: B = (trans[k] ? trans[k] : B);
1526: MatRestoreRowIJ(B,0,PETSC_FALSE,PETSC_FALSE,&nnr,(const PetscInt**)&aii[k],(const PetscInt**)&ajj[k],&done);
1527: if (!done) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_PLIB,"MatRestoreRowIJ");
1528: MatSeqAIJRestoreArray(B,&avv[k]);
1529: MatDestroy(&trans[k]);
1530: }
1531: }
1532: }
1533: PetscFree4(aii,ajj,avv,trans);
1535: /* finalize newmat */
1536: if (reuse == MAT_INITIAL_MATRIX) {
1537: MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A),nr,nc,ii,jj,vv,newmat);
1538: } else if (reuse == MAT_INPLACE_MATRIX) {
1539: Mat B;
1541: MatCreateSeqAIJWithArrays(PetscObjectComm((PetscObject)A),nr,nc,ii,jj,vv,&B);
1542: MatHeaderReplace(A,&B);
1543: }
1544: MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);
1545: MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);
1546: {
1547: Mat_SeqAIJ *a = (Mat_SeqAIJ*)((*newmat)->data);
1548: a->free_a = PETSC_TRUE;
1549: a->free_ij = PETSC_TRUE;
1550: }
1551: return(0);
1552: }
1554: PETSC_INTERN PetscErrorCode MatConvert_Nest_AIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
1555: {
1557: Mat_Nest *nest = (Mat_Nest*)A->data;
1558: PetscInt m,n,M,N,i,j,k,*dnnz,*onnz,rstart;
1559: PetscInt cstart,cend;
1560: PetscMPIInt size;
1561: Mat C;
1564: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1565: if (size == 1) { /* look for the special case of SeqAIJ blocks addressed by stride-1, contiguous index sets */
1566: PetscInt nf;
1567: PetscBool fast;
1569: PetscStrcmp(newtype,MATAIJ,&fast);
1570: if (!fast) {
1571: PetscStrcmp(newtype,MATSEQAIJ,&fast);
1572: }
1573: for (i=0; i<nest->nr && fast; ++i) {
1574: for (j=0; j<nest->nc && fast; ++j) {
1575: Mat B = nest->m[i][j];
1576: if (B) {
1577: PetscObjectTypeCompare((PetscObject)B,MATSEQAIJ,&fast);
1578: if (!fast) {
1579: PetscBool istrans;
1581: PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&istrans);
1582: if (istrans) {
1583: Mat Bt;
1585: MatTransposeGetMat(B,&Bt);
1586: PetscObjectTypeCompare((PetscObject)Bt,MATSEQAIJ,&fast);
1587: }
1588: }
1589: }
1590: }
1591: }
1592: for (i=0, nf=0; i<nest->nr && fast; ++i) {
1593: PetscObjectTypeCompare((PetscObject)nest->isglobal.row[i],ISSTRIDE,&fast);
1594: if (fast) {
1595: PetscInt f,s;
1597: ISStrideGetInfo(nest->isglobal.row[i],&f,&s);
1598: if (f != nf || s != 1) { fast = PETSC_FALSE; }
1599: else {
1600: ISGetSize(nest->isglobal.row[i],&f);
1601: nf += f;
1602: }
1603: }
1604: }
1605: for (i=0, nf=0; i<nest->nc && fast; ++i) {
1606: PetscObjectTypeCompare((PetscObject)nest->isglobal.col[i],ISSTRIDE,&fast);
1607: if (fast) {
1608: PetscInt f,s;
1610: ISStrideGetInfo(nest->isglobal.col[i],&f,&s);
1611: if (f != nf || s != 1) { fast = PETSC_FALSE; }
1612: else {
1613: ISGetSize(nest->isglobal.col[i],&f);
1614: nf += f;
1615: }
1616: }
1617: }
1618: if (fast) {
1619: MatConvert_Nest_SeqAIJ_fast(A,newtype,reuse,newmat);
1620: return(0);
1621: }
1622: }
1623: MatGetSize(A,&M,&N);
1624: MatGetLocalSize(A,&m,&n);
1625: MatGetOwnershipRangeColumn(A,&cstart,&cend);
1626: switch (reuse) {
1627: case MAT_INITIAL_MATRIX:
1628: MatCreate(PetscObjectComm((PetscObject)A),&C);
1629: MatSetType(C,newtype);
1630: MatSetSizes(C,m,n,M,N);
1631: *newmat = C;
1632: break;
1633: case MAT_REUSE_MATRIX:
1634: C = *newmat;
1635: break;
1636: default: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatReuse");
1637: }
1638: PetscMalloc1(2*m,&dnnz);
1639: onnz = dnnz + m;
1640: for (k=0; k<m; k++) {
1641: dnnz[k] = 0;
1642: onnz[k] = 0;
1643: }
1644: for (j=0; j<nest->nc; ++j) {
1645: IS bNis;
1646: PetscInt bN;
1647: const PetscInt *bNindices;
1648: /* Using global column indices and ISAllGather() is not scalable. */
1649: ISAllGather(nest->isglobal.col[j], &bNis);
1650: ISGetSize(bNis, &bN);
1651: ISGetIndices(bNis,&bNindices);
1652: for (i=0; i<nest->nr; ++i) {
1653: PetscSF bmsf;
1654: PetscSFNode *iremote;
1655: Mat B;
1656: PetscInt bm, *sub_dnnz,*sub_onnz, br;
1657: const PetscInt *bmindices;
1658: B = nest->m[i][j];
1659: if (!B) continue;
1660: ISGetLocalSize(nest->isglobal.row[i],&bm);
1661: ISGetIndices(nest->isglobal.row[i],&bmindices);
1662: PetscSFCreate(PetscObjectComm((PetscObject)A), &bmsf);
1663: PetscMalloc1(bm,&iremote);
1664: PetscMalloc1(bm,&sub_dnnz);
1665: PetscMalloc1(bm,&sub_onnz);
1666: for (k = 0; k < bm; ++k){
1667: sub_dnnz[k] = 0;
1668: sub_onnz[k] = 0;
1669: }
1670: /*
1671: Locate the owners for all of the locally-owned global row indices for this row block.
1672: These determine the roots of PetscSF used to communicate preallocation data to row owners.
1673: The roots correspond to the dnnz and onnz entries; thus, there are two roots per row.
1674: */
1675: MatGetOwnershipRange(B,&rstart,NULL);
1676: for (br = 0; br < bm; ++br) {
1677: PetscInt row = bmindices[br], rowowner = 0, brncols, col;
1678: const PetscInt *brcols;
1679: PetscInt rowrel = 0; /* row's relative index on its owner rank */
1680: PetscLayoutFindOwnerIndex(A->rmap,row,&rowowner,&rowrel);
1681: /* how many roots */
1682: iremote[br].rank = rowowner; iremote[br].index = rowrel; /* edge from bmdnnz to dnnz */
1683: /* get nonzero pattern */
1684: MatGetRow(B,br+rstart,&brncols,&brcols,NULL);
1685: for (k=0; k<brncols; k++) {
1686: col = bNindices[brcols[k]];
1687: if (col>=A->cmap->range[rowowner] && col<A->cmap->range[rowowner+1]) {
1688: sub_dnnz[br]++;
1689: } else {
1690: sub_onnz[br]++;
1691: }
1692: }
1693: MatRestoreRow(B,br+rstart,&brncols,&brcols,NULL);
1694: }
1695: ISRestoreIndices(nest->isglobal.row[i],&bmindices);
1696: /* bmsf takes ownership of iremote and will dispose of it. */
1697: PetscSFSetGraph(bmsf,m,bm,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1698: PetscSFReduceBegin(bmsf,MPIU_INT,sub_dnnz,dnnz,MPI_SUM);
1699: PetscSFReduceEnd(bmsf,MPIU_INT,sub_dnnz,dnnz,MPI_SUM);
1700: PetscSFReduceBegin(bmsf,MPIU_INT,sub_onnz,onnz,MPI_SUM);
1701: PetscSFReduceEnd(bmsf,MPIU_INT,sub_onnz,onnz,MPI_SUM);
1702: PetscFree(sub_dnnz);
1703: PetscFree(sub_onnz);
1704: PetscSFDestroy(&bmsf);
1705: }
1706: ISRestoreIndices(bNis,&bNindices);
1707: ISDestroy(&bNis);
1708: }
1709: /* Resize preallocation if overestimated */
1710: for (i=0;i<m;i++) {
1711: dnnz[i] = PetscMin(dnnz[i],A->cmap->n);
1712: onnz[i] = PetscMin(onnz[i],A->cmap->N - A->cmap->n);
1713: }
1714: MatSeqAIJSetPreallocation(C,0,dnnz);
1715: MatMPIAIJSetPreallocation(C,0,dnnz,0,onnz);
1716: PetscFree(dnnz);
1718: /* Fill by row */
1719: for (j=0; j<nest->nc; ++j) {
1720: /* Using global column indices and ISAllGather() is not scalable. */
1721: IS bNis;
1722: PetscInt bN;
1723: const PetscInt *bNindices;
1724: ISAllGather(nest->isglobal.col[j], &bNis);
1725: ISGetSize(bNis,&bN);
1726: ISGetIndices(bNis,&bNindices);
1727: for (i=0; i<nest->nr; ++i) {
1728: Mat B;
1729: PetscInt bm, br;
1730: const PetscInt *bmindices;
1731: B = nest->m[i][j];
1732: if (!B) continue;
1733: ISGetLocalSize(nest->isglobal.row[i],&bm);
1734: ISGetIndices(nest->isglobal.row[i],&bmindices);
1735: MatGetOwnershipRange(B,&rstart,NULL);
1736: for (br = 0; br < bm; ++br) {
1737: PetscInt row = bmindices[br], brncols, *cols;
1738: const PetscInt *brcols;
1739: const PetscScalar *brcoldata;
1740: MatGetRow(B,br+rstart,&brncols,&brcols,&brcoldata);
1741: PetscMalloc1(brncols,&cols);
1742: for (k=0; k<brncols; k++) cols[k] = bNindices[brcols[k]];
1743: /*
1744: Nest blocks are required to be nonoverlapping -- otherwise nest and monolithic index layouts wouldn't match.
1745: Thus, we could use INSERT_VALUES, but I prefer ADD_VALUES.
1746: */
1747: MatSetValues(C,1,&row,brncols,cols,brcoldata,ADD_VALUES);
1748: MatRestoreRow(B,br+rstart,&brncols,&brcols,&brcoldata);
1749: PetscFree(cols);
1750: }
1751: ISRestoreIndices(nest->isglobal.row[i],&bmindices);
1752: }
1753: ISRestoreIndices(bNis,&bNindices);
1754: ISDestroy(&bNis);
1755: }
1756: MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
1757: MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);
1758: return(0);
1759: }
1761: PetscErrorCode MatHasOperation_Nest(Mat mat,MatOperation op,PetscBool *has)
1762: {
1764: *has = PETSC_FALSE;
1765: if (op == MATOP_MULT_TRANSPOSE) {
1766: Mat_Nest *bA = (Mat_Nest*)mat->data;
1767: PetscInt i,j,nr = bA->nr,nc = bA->nc;
1769: PetscBool flg;
1771: for (j=0; j<nc; j++) {
1772: for (i=0; i<nr; i++) {
1773: if (!bA->m[i][j]) continue;
1774: MatHasOperation(bA->m[i][j],MATOP_MULT_TRANSPOSE_ADD,&flg);
1775: if (!flg) return(0);
1776: }
1777: }
1778: }
1779: if (((void**)mat->ops)[op]) *has = PETSC_TRUE;
1780: return(0);
1781: }
1783: /*MC
1784: MATNEST - MATNEST = "nest" - Matrix type consisting of nested submatrices, each stored separately.
1786: Level: intermediate
1788: Notes:
1789: This matrix type permits scalable use of PCFieldSplit and avoids the large memory costs of extracting submatrices.
1790: It allows the use of symmetric and block formats for parts of multi-physics simulations.
1791: It is usually used with DMComposite and DMCreateMatrix()
1793: Each of the submatrices lives on the same MPI communicator as the original nest matrix (though they can have zero
1794: rows/columns on some processes). Thus this is not meant for cases where the submatrices live on far fewer processes
1795: than the nest matrix.
1797: .seealso: MatCreate(), MatType, MatCreateNest()
1798: M*/
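/*
   A minimal sketch of combining MATNEST with PCFIELDSPLIT (illustrative, not part of the
   PETSc source; "S" is assumed to be a 2x2 MATNEST and "ksp" a KSP whose operator is S,
   error checking omitted; the split names "0" and "1" are arbitrary):

      IS rows[2];
      PC pc;
      MatNestGetISs(S,rows,NULL);
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCFIELDSPLIT);
      PCFieldSplitSetIS(pc,"0",rows[0]);
      PCFieldSplitSetIS(pc,"1",rows[1]);
*/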
1799: PETSC_EXTERN PetscErrorCode MatCreate_Nest(Mat A)
1800: {
1801: Mat_Nest *s;
1805: PetscNewLog(A,&s);
1806: A->data = (void*)s;
1808: s->nr = -1;
1809: s->nc = -1;
1810: s->m = NULL;
1811: s->splitassembly = PETSC_FALSE;
1813: PetscMemzero(A->ops,sizeof(*A->ops));
1815: A->ops->mult = MatMult_Nest;
1816: A->ops->multadd = MatMultAdd_Nest;
1817: A->ops->multtranspose = MatMultTranspose_Nest;
1818: A->ops->multtransposeadd = MatMultTransposeAdd_Nest;
1819: A->ops->transpose = MatTranspose_Nest;
1820: A->ops->assemblybegin = MatAssemblyBegin_Nest;
1821: A->ops->assemblyend = MatAssemblyEnd_Nest;
1822: A->ops->zeroentries = MatZeroEntries_Nest;
1823: A->ops->copy = MatCopy_Nest;
1824: A->ops->duplicate = MatDuplicate_Nest;
1825: A->ops->createsubmatrix = MatCreateSubMatrix_Nest;
1826: A->ops->destroy = MatDestroy_Nest;
1827: A->ops->view = MatView_Nest;
1828: A->ops->getvecs = 0; /* Use VECNEST by calling MatNestSetVecType(A,VECNEST) */
1829: A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_Nest;
1830: A->ops->restorelocalsubmatrix = MatRestoreLocalSubMatrix_Nest;
1831: A->ops->getdiagonal = MatGetDiagonal_Nest;
1832: A->ops->diagonalscale = MatDiagonalScale_Nest;
1833: A->ops->scale = MatScale_Nest;
1834: A->ops->shift = MatShift_Nest;
1835: A->ops->diagonalset = MatDiagonalSet_Nest;
1836: A->ops->setrandom = MatSetRandom_Nest;
1837: A->ops->hasoperation = MatHasOperation_Nest;
1839: A->spptr = 0;
1840: A->assembled = PETSC_FALSE;
1842: /* expose Nest api's */
1843: PetscObjectComposeFunction((PetscObject)A,"MatNestGetSubMat_C", MatNestGetSubMat_Nest);
1844: PetscObjectComposeFunction((PetscObject)A,"MatNestSetSubMat_C", MatNestSetSubMat_Nest);
1845: PetscObjectComposeFunction((PetscObject)A,"MatNestGetSubMats_C", MatNestGetSubMats_Nest);
1846: PetscObjectComposeFunction((PetscObject)A,"MatNestGetSize_C", MatNestGetSize_Nest);
1847: PetscObjectComposeFunction((PetscObject)A,"MatNestGetISs_C", MatNestGetISs_Nest);
1848: PetscObjectComposeFunction((PetscObject)A,"MatNestGetLocalISs_C", MatNestGetLocalISs_Nest);
1849: PetscObjectComposeFunction((PetscObject)A,"MatNestSetVecType_C", MatNestSetVecType_Nest);
1850: PetscObjectComposeFunction((PetscObject)A,"MatNestSetSubMats_C", MatNestSetSubMats_Nest);
1851: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_mpiaij_C",MatConvert_Nest_AIJ);
1852: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_seqaij_C",MatConvert_Nest_AIJ);
1853: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_aij_C", MatConvert_Nest_AIJ);
1854: PetscObjectComposeFunction((PetscObject)A,"MatConvert_nest_is_C", MatConvert_Nest_IS);
1856: PetscObjectChangeTypeName((PetscObject)A,MATNEST);
1857: return(0);
1858: }