Actual source code: packm.c
petsc-3.13.6 2020-09-29
#include <../src/dm/impls/composite/packimpl.h>
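
/* Matrix creation for DMComposite: either a MATNEST whose blocks are the
   per-sub-DM matrices, or one monolithic (Seq/MPI)AIJ matrix assembled from
   the sub-DM Jacobians with their column indices shifted into the composite
   global numbering. */
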
static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm,Mat *J)
{
  const DM_Composite           *com = (DM_Composite*)dm->data;
  const struct DMCompositeLink *rlink,*clink;
  PetscErrorCode               ierr;
  IS                           *isg;
  Mat                          *submats;
  PetscInt                     i,j,n;

  PetscFunctionBegin;
  n = com->nDM; /* Total number of entries */

  /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
   * checking and allows ISEqual to compare by identity instead of by contents. */
  ierr = DMCompositeGetGlobalISs(dm,&isg);CHKERRQ(ierr);

  /* Get submatrices */
  ierr = PetscMalloc1(n*n,&submats);CHKERRQ(ierr);
  for (i=0,rlink=com->next; rlink; i++,rlink=rlink->next) {
    for (j=0,clink=com->next; clink; j++,clink=clink->next) {
      Mat sub = NULL;
      if (i == j) {
        ierr = DMCreateMatrix(rlink->dm,&sub);CHKERRQ(ierr);
      } else if (com->FormCoupleLocations) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Cannot manage off-diagonal parts yet");
      submats[i*n+j] = sub;
    }
  }
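
  /* MatCreateNest() takes its own references to the index sets and
     submatrices, so the local references can be dropped afterwards. */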
  ierr = MatCreateNest(PetscObjectComm((PetscObject)dm),n,isg,n,isg,submats,J);CHKERRQ(ierr);

  /* Disown references */
  for (i=0; i<n; i++) {ierr = ISDestroy(&isg[i]);CHKERRQ(ierr);}
  ierr = PetscFree(isg);CHKERRQ(ierr);

  for (i=0; i<n*n; i++) {
    if (submats[i]) {ierr = MatDestroy(&submats[i]);CHKERRQ(ierr);}
  }
  ierr = PetscFree(submats);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
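
/* The nest path above is selected when the DM's matrix type is MATNEST,
   e.g. via DMSetMatType(dm,MATNEST) or the option -dm_mat_type nest; see
   the dispatch in DMCreateMatrix_Composite() at the bottom of this file. */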

static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm,Mat *J)
{
  PetscErrorCode         ierr;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next;
  PetscInt               m,*dnz,*onz,i,j,mA;
  Mat                    Atmp;
  PetscMPIInt            rank;
  PetscBool              dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  ierr = PetscOptionsGetBool(((PetscObject)dm)->options,((PetscObject)dm)->prefix,"-dmcomposite_dense_jacobian",&dense,NULL);CHKERRQ(ierr);
  if (dense) {
    PetscInt    rstart,rend,*indices;
    PetscScalar *values;

    mA   = com->N;
    ierr = MatMPIAIJSetPreallocation(*J,mA,NULL,mA-m,NULL);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(*J,mA,NULL);CHKERRQ(ierr);

    ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
    ierr = PetscMalloc2(mA,&values,mA,&indices);CHKERRQ(ierr);
    ierr = PetscArrayzero(values,mA);CHKERRQ(ierr);
    for (i=0; i<mA; i++) indices[i] = i;
    for (i=rstart; i<rend; i++) {
      ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = PetscFree2(values,indices);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
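  /* MatPreallocateInitialize() is a macro: it allocates and zeroes the
     per-row nonzero counters dnz (diagonal block) and onz (off-diagonal
     block), and declares the __rstart, __nrows, __start, __end locals that
     are later handed to FormCoupleLocations(). */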
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)dm),m,m,dnz,onz);CHKERRQ(ierr);
  /* loop over packed objects, handling one at a time */
  next = com->next;
  while (next) {
    PetscInt       nc,rstart,*ccols,maxnc;
    const PetscInt *cols,*rstarts;
    PetscMPIInt    proc;

    ierr = DMCreateMatrix(next->dm,&Atmp);CHKERRQ(ierr);
    ierr = MatGetOwnershipRange(Atmp,&rstart,NULL);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
    ierr = MatGetLocalSize(Atmp,&mA,NULL);CHKERRQ(ierr);
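
    /* First pass over the rows: find the widest row so a single scratch
       array (ccols) can hold the remapped column indices of any row. */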
    maxnc = 0;
    for (i=0; i<mA; i++) {
      ierr  = MatGetRow(Atmp,rstart+i,&nc,NULL,NULL);CHKERRQ(ierr);
      ierr  = MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL);CHKERRQ(ierr);
      maxnc = PetscMax(nc,maxnc);
    }
    ierr = PetscMalloc1(maxnc,&ccols);CHKERRQ(ierr);
    for (i=0; i<mA; i++) {
      ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,NULL);CHKERRQ(ierr);
      /* remap the columns, taking into account how much each process's rows
         are shifted between the sub-DM numbering (rstarts) and the composite
         numbering (next->grstarts) */
      for (j=0; j<nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,NULL);CHKERRQ(ierr);
    }
    ierr = PetscFree(ccols);CHKERRQ(ierr);
    ierr = MatDestroy(&Atmp);CHKERRQ(ierr);
    next = next->next;
  }
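  /* Optional user hook: count the nonzero locations of the coupling between
     sub-DMs (called here with a NULL Mat, i.e. in counting mode). */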
  if (com->FormCoupleLocations) {
    ierr = (*com->FormCoupleLocations)(dm,NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr);
  }
  ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  if (dm->prealloc_only) PetscFunctionReturn(0);
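
  /* Second pass: walk the sub-DMs again, this time copying the values of
     each sub-DM Jacobian into the composite matrix at the shifted columns. */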
  next = com->next;
  while (next) {
    PetscInt          nc,rstart,row,maxnc,*ccols;
    const PetscInt    *cols,*rstarts;
    const PetscScalar *values;
    PetscMPIInt       proc;

    ierr = DMCreateMatrix(next->dm,&Atmp);CHKERRQ(ierr);
    ierr = MatGetOwnershipRange(Atmp,&rstart,NULL);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
    ierr = MatGetLocalSize(Atmp,&mA,NULL);CHKERRQ(ierr);
    maxnc = 0;
    for (i=0; i<mA; i++) {
      ierr  = MatGetRow(Atmp,rstart+i,&nc,NULL,NULL);CHKERRQ(ierr);
      maxnc = PetscMax(nc,maxnc);
      ierr  = MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(maxnc,&ccols);CHKERRQ(ierr);
    for (i=0; i<mA; i++) {
      ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values);CHKERRQ(ierr);
      /* remap the columns with the same shift as in the preallocation pass */
      for (j=0; j<nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row  = com->rstart+next->rstart+i;
      ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values);CHKERRQ(ierr);
    }
    ierr = PetscFree(ccols);CHKERRQ(ierr);
    ierr = MatDestroy(&Atmp);CHKERRQ(ierr);
    next = next->next;
  }
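  /* Same user hook as above, now given the assembled matrix (and no
     counters) so it can insert the actual coupling entries. */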
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    ierr = MatGetOwnershipRange(*J,&__rstart,NULL);CHKERRQ(ierr);
    ierr = (*com->FormCoupleLocations)(dm,*J,NULL,NULL,__rstart,0,0,0);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode DMCreateMatrix_Composite(DM dm,Mat *J)
{
  PetscErrorCode         ierr;
  PetscBool              usenest;
  ISLocalToGlobalMapping ltogmap;

  PetscFunctionBegin;
  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
  ierr = DMSetUp(dm);CHKERRQ(ierr);
  ierr = PetscStrcmp(dm->mattype,MATNEST,&usenest);CHKERRQ(ierr);
  if (usenest) {
    ierr = DMCreateMatrix_Composite_Nest(dm,J);CHKERRQ(ierr);
  } else {
    ierr = DMCreateMatrix_Composite_AIJ(dm,J);CHKERRQ(ierr);
  }

  ierr = DMGetLocalToGlobalMapping(dm,&ltogmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap);CHKERRQ(ierr);
  ierr = MatSetDM(*J,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
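
/* Minimal usage sketch (not part of the original source; the names pack,
   da0, da1 and the grid sizes are illustrative only):

     DM  pack,da0,da1;
     Mat J;

     DMCompositeCreate(PETSC_COMM_WORLD,&pack);
     DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,8,1,1,NULL,&da0);
     DMSetUp(da0);
     DMCompositeAddDM(pack,da0);
     DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,6,1,1,NULL,&da1);
     DMSetUp(da1);
     DMCompositeAddDM(pack,da1);
     DMSetMatType(pack,MATNEST);   (or keep the default AIJ type)
     DMCreateMatrix(pack,&J);      (dispatches to the routines above)

   Error checking (ierr = ...;CHKERRQ(ierr);) is omitted for brevity. */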