Actual source code: sorder.c
/*
   Provides the code that allows PETSc users to register their own
   sequential matrix ordering routines.
*/
#include <petsc/private/matimpl.h>
#include <petscmat.h>

PetscFunctionList MatOrderingList              = NULL;
PetscBool         MatOrderingRegisterAllCalled = PETSC_FALSE;
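
/*
   MatGetOrdering_Natural - Returns the identity (natural) ordering: rows and columns
   are kept in their original order.
*/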
PETSC_INTERN PetscErrorCode MatGetOrdering_Natural(Mat mat, MatOrderingType type, IS *irow, IS *icol)
{
  PetscInt  n, i, *ii;
  PetscBool done;
  MPI_Comm  comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCall(MatGetRowIJ(mat, 0, PETSC_FALSE, PETSC_TRUE, &n, NULL, NULL, &done));
  PetscCall(MatRestoreRowIJ(mat, 0, PETSC_FALSE, PETSC_TRUE, NULL, NULL, NULL, &done));
  if (done) { /* matrix may be "compressed" in symbolic factorization, due to i-nodes or block storage */
    /*
      We actually create general index sets because this avoids mallocs to
      obtain the indices in the MatSolve() routines.
      PetscCall(ISCreateStride(PETSC_COMM_SELF,n,0,1,irow));
      PetscCall(ISCreateStride(PETSC_COMM_SELF,n,0,1,icol));
    */
    PetscCall(PetscMalloc1(n, &ii));
    for (i = 0; i < n; i++) ii[i] = i;
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, ii, PETSC_COPY_VALUES, irow));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, ii, PETSC_OWN_POINTER, icol));
  } else {
    PetscInt start, end;

    PetscCall(MatGetOwnershipRange(mat, &start, &end));
    PetscCall(ISCreateStride(comm, end - start, start, 1, irow));
    PetscCall(ISCreateStride(comm, end - start, start, 1, icol));
  }
  PetscCall(ISSetIdentity(*irow));
  PetscCall(ISSetIdentity(*icol));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Orders the rows (and columns) by the lengths of the rows.
   This produces a symmetric ordering but does not require a
   matrix with symmetric non-zero structure.
*/
PETSC_INTERN PetscErrorCode MatGetOrdering_RowLength(Mat mat, MatOrderingType type, IS *irow, IS *icol)
{
  PetscInt        n, *permr, *lens, i;
  const PetscInt *ia, *ja;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(mat, 0, PETSC_FALSE, PETSC_TRUE, &n, &ia, &ja, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot get rows for matrix");

  PetscCall(PetscMalloc2(n, &lens, n, &permr));
  for (i = 0; i < n; i++) {
    lens[i]  = ia[i + 1] - ia[i];
    permr[i] = i;
  }
  PetscCall(MatRestoreRowIJ(mat, 0, PETSC_FALSE, PETSC_TRUE, NULL, &ia, &ja, &done));

  PetscCall(PetscSortIntWithPermutation(n, lens, permr));

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, permr, PETSC_COPY_VALUES, irow));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, permr, PETSC_COPY_VALUES, icol));
  PetscCall(PetscFree2(lens, permr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatOrderingRegister - Adds a new sparse matrix ordering to the matrix package.

  Not Collective

  Input Parameters:
+ sname    - name of ordering (for example `MATORDERINGND`)
- function - function pointer that creates the ordering

  Level: developer

  Example Usage:
.vb
   MatOrderingRegister("my_order", MyOrder);
.ve

  Then, your ordering can be chosen with the procedural interface via `MatGetOrdering(mat, "my_order", &rperm, &cperm)` or at runtime via the option
  `-pc_factor_mat_ordering_type my_order`
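
  The registered `function` must have the same calling sequence as the built-in orderings in this file (for example `MatGetOrdering_Natural()`);
  a minimal sketch, where the name `MyOrder` is only a placeholder:
.vb
   PetscErrorCode MyOrder(Mat mat, MatOrderingType type, IS *rowperm, IS *colperm)
   {
     PetscFunctionBegin;
     /* create sequential index sets in *rowperm and *colperm describing the new row and column order */
     PetscFunctionReturn(PETSC_SUCCESS);
   }
.ve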

.seealso: `Mat`, `MatOrderingType`, `MatOrderingRegisterAll()`, `MatGetOrdering()`
@*/
PetscErrorCode MatOrderingRegister(const char sname[], PetscErrorCode (*function)(Mat, MatOrderingType, IS *, IS *))
{
  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatOrderingList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
/*@C
  MatGetOrdering - Gets a reordering for a matrix to reduce fill or to
  improve numerical stability of LU factorization.

  Collective

  Input Parameters:
+ mat  - the matrix
- type - type of reordering, one of the following
.vb
   MATORDERINGNATURAL_OR_ND - Nested dissection unless matrix is SBAIJ then it is natural
   MATORDERINGNATURAL       - Natural
   MATORDERINGND            - Nested Dissection
   MATORDERING1WD           - One-way Dissection
   MATORDERINGRCM           - Reverse Cuthill-McKee
   MATORDERINGQMD           - Quotient Minimum Degree
   MATORDERINGEXTERNAL      - Use an ordering internal to the factorization package and do not compute or use PETSc's
.ve

  Output Parameters:
+ rperm - row permutation indices
- cperm - column permutation indices

  Options Database Keys:
+ -mat_view_ordering draw                      - plots matrix nonzero structure in new ordering
- -pc_factor_mat_ordering_type <nd,natural,..> - ordering to use with `PC`s based on factorization, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`

  Level: intermediate

  Notes:
  This DOES NOT actually reorder the matrix; it merely returns two index sets
  that define a reordering. It is usually not called directly; instead select the ordering with
  `PCFactorSetMatOrderingType()`.

  The user can define additional orderings; see `MatOrderingRegister()`.

  These are generally only implemented for sequential sparse matrices.

  Some external packages that PETSc can use for direct factorization, such as SuperLU_DIST, do not accept orderings provided by
  this call.

  If `MATORDERINGEXTERNAL` is used then PETSc does not compute an ordering and utilizes one built into the factorization package.
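
  Example Usage:
  A sketch that computes a reverse Cuthill-McKee ordering and applies it explicitly with `MatPermute()`; the matrices `A` and `B` are placeholders:
.vb
   IS  rperm, cperm;
   Mat B;

   PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
   PetscCall(MatPermute(A, rperm, cperm, &B)); /* B holds A in the new ordering */
   PetscCall(MatDestroy(&B));
   PetscCall(ISDestroy(&rperm));
   PetscCall(ISDestroy(&cperm));
.ve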

.seealso: `MatOrderingRegister()`, `PCFactorSetMatOrderingType()`, `MatColoring`, `MatColoringCreate()`, `MatOrderingType`, `Mat`
@*/
PetscErrorCode MatGetOrdering(Mat mat, MatOrderingType type, IS *rperm, IS *cperm)
{
  PetscInt mmat, nmat, mis;
  PetscErrorCode (*r)(Mat, MatOrderingType, IS *, IS *);
  PetscBool flg, ismpiaij;

  PetscFunctionBegin;
  PetscAssertPointer(rperm, 3);
  PetscAssertPointer(cperm, 4);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(type, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Ordering type cannot be null");

  PetscCall(PetscStrcmp(type, MATORDERINGEXTERNAL, &flg));
  if (flg) {
    *rperm = NULL;
    *cperm = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* This code is terrible. MatGetOrdering() should dispatch on the matrix type, and this code should move to impls/aij/mpi. */
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) { /* Reorder using diagonal block */
    Mat             Ad, Ao;
    const PetscInt *colmap;
    IS              lrowperm, lcolperm;
    PetscInt        i, rstart, rend, *idx;
    const PetscInt *lidx;

    PetscCall(MatMPIAIJGetSeqAIJ(mat, &Ad, &Ao, &colmap));
    PetscCall(MatGetOrdering(Ad, type, &lrowperm, &lcolperm));
    PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
    /* Remap row index set to global space */
    PetscCall(ISGetIndices(lrowperm, &lidx));
    PetscCall(PetscMalloc1(rend - rstart, &idx));
    for (i = 0; i + rstart < rend; i++) idx[i] = rstart + lidx[i];
    PetscCall(ISRestoreIndices(lrowperm, &lidx));
    PetscCall(ISDestroy(&lrowperm));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), rend - rstart, idx, PETSC_OWN_POINTER, rperm));
    PetscCall(ISSetPermutation(*rperm));
    /* Remap column index set to global space */
    PetscCall(ISGetIndices(lcolperm, &lidx));
    PetscCall(PetscMalloc1(rend - rstart, &idx));
    for (i = 0; i + rstart < rend; i++) idx[i] = rstart + lidx[i];
    PetscCall(ISRestoreIndices(lcolperm, &lidx));
    PetscCall(ISDestroy(&lcolperm));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), rend - rstart, idx, PETSC_OWN_POINTER, cperm));
    PetscCall(ISSetPermutation(*cperm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if (!mat->rmap->N) { /* matrix has zero rows */
    PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, cperm));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, rperm));
    PetscCall(ISSetIdentity(*cperm));
    PetscCall(ISSetIdentity(*rperm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(MatGetLocalSize(mat, &mmat, &nmat));
  PetscCheck(mmat == nmat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Must be square matrix, rows %" PetscInt_FMT " columns %" PetscInt_FMT, mmat, nmat);

  PetscCall(MatOrderingRegisterAll());
  PetscCall(PetscFunctionListFind(MatOrderingList, type, &r));
  PetscCheck(r, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Unknown or unregistered type: %s", type);

  PetscCall(PetscLogEventBegin(MAT_GetOrdering, mat, 0, 0, 0));
  PetscCall((*r)(mat, type, rperm, cperm));
  PetscCall(ISSetPermutation(*rperm));
  PetscCall(ISSetPermutation(*cperm));
  /* Adjust for inode (reduced matrix ordering) only if the row permutation is smaller than the matrix size */
  PetscCall(ISGetLocalSize(*rperm, &mis));
  if (mmat > mis) PetscCall(MatInodeAdjustForInodes(mat, rperm, cperm));
  PetscCall(PetscLogEventEnd(MAT_GetOrdering, mat, 0, 0, 0));

  PetscCall(PetscOptionsHasName(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-mat_view_ordering", &flg));
  if (flg) {
    Mat tmat;

    PetscCall(MatPermute(mat, *rperm, *cperm, &tmat));
    PetscCall(MatViewFromOptions(tmat, (PetscObject)mat, "-mat_view_ordering"));
    PetscCall(MatDestroy(&tmat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
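
/*
   MatGetOrderingList - Returns the `PetscFunctionList` of currently registered matrix ordering routines;
   entries are added with `MatOrderingRegister()`.
*/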
PetscErrorCode MatGetOrderingList(PetscFunctionList *list)
{
  PetscFunctionBegin;
  *list = MatOrderingList;
  PetscFunctionReturn(PETSC_SUCCESS);
}