Actual source code: packm.c
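/* DMCreateMatrix() implementations for DMComposite: a MATNEST variant that keeps
   the per-component blocks as separate submatrices, and an AIJ variant that
   assembles one monolithic sparse matrix. */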
#include <../src/dm/impls/composite/packimpl.h>

static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm, Mat *J)
{
  const DM_Composite           *com = (DM_Composite *)dm->data;
  const struct DMCompositeLink *rlink, *clink;
  IS                           *isg;
  Mat                          *submats;
  PetscInt                      i, j, n;

  PetscFunctionBegin;
  n = com->nDM; /* Total number of entries */

  /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
   * checking and allows ISEqual to compare by identity instead of by contents. */
  PetscCall(DMCompositeGetGlobalISs(dm, &isg));

  /* Get submatrices */
  PetscCall(PetscMalloc1(n * n, &submats));
  for (i = 0, rlink = com->next; rlink; i++, rlink = rlink->next) {
    for (j = 0, clink = com->next; clink; j++, clink = clink->next) {
      Mat sub = NULL;
      if (i == j) {
        PetscCall(DMCreateMatrix(rlink->dm, &sub));
      } else PetscCheck(!com->FormCoupleLocations, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Cannot manage off-diagonal parts yet");
      submats[i * n + j] = sub;
    }
  }

  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)dm), n, isg, n, isg, submats, J));

  /* Disown references */
  for (i = 0; i < n; i++) PetscCall(ISDestroy(&isg[i]));
  PetscCall(PetscFree(isg));

  for (i = 0; i < n * n; i++) {
    if (submats[i]) PetscCall(MatDestroy(&submats[i]));
  }
  PetscCall(PetscFree(submats));
  PetscFunctionReturn(PETSC_SUCCESS);
}
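
/* A minimal sketch of how this path is reached from user code (hypothetical
   driver, not part of this file); pack is assumed to be a populated DMComposite:

     PetscCall(DMSetMatType(pack, MATNEST));
     PetscCall(DMCreateMatrix(pack, &J));

   DMCreateMatrix_Composite() below compares dm->mattype against MATNEST and
   dispatches here. */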

static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm, Mat *J)
{
  DM_Composite           *com = (DM_Composite *)dm->data;
  struct DMCompositeLink *next;
  PetscInt                m, *dnz, *onz, i, j, mA;
  Mat                     Atmp;
  PetscMPIInt             rank;
  PetscBool               dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*J, dm->mattype));

  /*
     Extremely inefficient, but computes the entire (dense) Jacobian for testing;
     enabled with the -dmcomposite_dense_jacobian option read below.
  */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dmcomposite_dense_jacobian", &dense, NULL));
  if (dense) {
    PetscInt     rstart, rend, *indices;
    PetscScalar *values;

    mA = com->N;
    PetscCall(MatMPIAIJSetPreallocation(*J, mA, NULL, mA - m, NULL));
    PetscCall(MatSeqAIJSetPreallocation(*J, mA, NULL));

    PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
    PetscCall(PetscMalloc2(mA, &values, mA, &indices));
    PetscCall(PetscArrayzero(values, mA));
    for (i = 0; i < mA; i++) indices[i] = i;
    for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, mA, indices, values, INSERT_VALUES));
    PetscCall(PetscFree2(values, indices));
    PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
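  /* MatPreallocateBegin() is a macro: it allocates the zeroed dnz/onz per-row
     nonzero counters and opens a scope defining __rstart, __nrows, __start, and
     __end, which the FormCoupleLocations call further below relies on. */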
  MatPreallocateBegin(PetscObjectComm((PetscObject)dm), m, m, dnz, onz);
  /* loop over packed objects, handling one at a time */
  next = com->next;
  while (next) {
    PetscInt        nc, rstart, *ccols, maxnc;
    const PetscInt *cols, *rstarts;
    PetscMPIInt     proc;

    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
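
    /* First pass over this component's local rows: record the longest row so a
       single column buffer (ccols) can be reused for the remapping below. */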
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, NULL));
      /* remap the columns, taking into account how much they are shifted on each process */
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      PetscCall(MatPreallocateSet(com->rstart + next->rstart + i, nc, ccols, dnz, onz));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, NULL));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
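  /* Give the user's coupling callback, if any, a chance to add preallocation for
     the off-diagonal (inter-component) coupling blocks. */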
  if (com->FormCoupleLocations) PetscCall((*com->FormCoupleLocations)(dm, NULL, dnz, onz, __rstart, __nrows, __start, __end));
  PetscCall(MatMPIAIJSetPreallocation(*J, 0, dnz, 0, onz));
  PetscCall(MatSeqAIJSetPreallocation(*J, 0, dnz));
  MatPreallocateEnd(dnz, onz);

  if (dm->prealloc_only) PetscFunctionReturn(PETSC_SUCCESS);
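
  /* Second sweep over the packed objects, following the same measure-then-remap
     pattern as above, this time inserting each diagonal block's values into *J. */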
  next = com->next;
  while (next) {
    PetscInt           nc, rstart, row, maxnc, *ccols;
    const PetscInt    *cols, *rstarts;
    const PetscScalar *values;
    PetscMPIInt        proc;

    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, (const PetscInt **)&cols, &values));
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row = com->rstart + next->rstart + i;
      PetscCall(MatSetValues(*J, 1, &row, nc, ccols, values, INSERT_VALUES));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, (const PetscInt **)&cols, &values));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    PetscCall(MatGetOwnershipRange(*J, &__rstart, NULL));
    PetscCall((*com->FormCoupleLocations)(dm, *J, NULL, NULL, __rstart, 0, 0, 0));
  }
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
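
/* DMCreateMatrix_Composite() below is the entry point used for DMComposite:
   it dispatches to the MATNEST implementation when the DM's matrix type is
   MATNEST and to the AIJ implementation otherwise, then attaches the composite
   local-to-global mapping to the result. */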

PetscErrorCode DMCreateMatrix_Composite(DM dm, Mat *J)
{
  PetscBool              usenest;
  ISLocalToGlobalMapping ltogmap;

  PetscFunctionBegin;
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMSetUp(dm));
  PetscCall(PetscStrcmp(dm->mattype, MATNEST, &usenest));
  if (usenest) PetscCall(DMCreateMatrix_Composite_Nest(dm, J));
  else PetscCall(DMCreateMatrix_Composite_AIJ(dm, J));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltogmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltogmap, ltogmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
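
/* A minimal end-to-end sketch (hypothetical user code, not part of this file);
   da0 and da1 are assumed to be two set-up DMs on PETSC_COMM_WORLD:

     DM  pack;
     Mat J;

     PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &pack));
     PetscCall(DMCompositeAddDM(pack, da0));
     PetscCall(DMCompositeAddDM(pack, da1));
     PetscCall(DMSetMatType(pack, MATNEST));  (optional; the default builds AIJ)
     PetscCall(DMCreateMatrix(pack, &J));     (reaches DMCreateMatrix_Composite())

   The returned J carries the composite local-to-global mapping set above, so
   MatSetValuesLocal() can be used with the packed numbering. */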