Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_SetValuesBatch;
40: PetscLogEvent MAT_ViennaCLCopyToGPU;
41: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
42: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
43: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
44: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
45: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
46: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
48: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
50: /*@
51: MatSetRandom - Sets all components of a matrix to random numbers.
53: Logically Collective
55: Input Parameters:
56: + x - the matrix
57: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
58: it will create one internally.
60: Example:
61: .vb
62: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
63: MatSetRandom(x,rctx);
64: PetscRandomDestroy(rctx);
65: .ve
67: Level: intermediate
69: Notes:
70: For sparse matrices that have been preallocated but not been assembled, it randomly selects appropriate locations,
72: for sparse matrices that already have nonzero locations, it fills the locations with random numbers.
74: It generates an error if used on unassembled sparse matrices that have not been preallocated.
76: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
77: @*/
78: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
79: {
80: PetscRandom randObj = NULL;
82: PetscFunctionBegin;
86: MatCheckPreallocated(x, 1);
88: if (!rctx) {
89: MPI_Comm comm;
90: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
91: PetscCall(PetscRandomCreate(comm, &randObj));
92: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
93: PetscCall(PetscRandomSetFromOptions(randObj));
94: rctx = randObj;
95: }
96: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
97: PetscUseTypeMethod(x, setrandom, rctx);
98: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
100: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
101: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(PetscRandomDestroy(&randObj));
103: PetscFunctionReturn(PETSC_SUCCESS);
104: }
106: /*@
107: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
109: Logically Collective
111: Input Parameter:
112: . mat - the factored matrix
114: Output Parameters:
115: + pivot - the pivot value computed
116: - row - the row that the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
117: the share the matrix
119: Level: advanced
121: Notes:
122: This routine does not work for factorizations done with external packages.
124: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
126: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
128: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
129: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
130: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
131: @*/
132: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
133: {
134: PetscFunctionBegin;
136: PetscAssertPointer(pivot, 2);
137: PetscAssertPointer(row, 3);
138: *pivot = mat->factorerror_zeropivot_value;
139: *row = mat->factorerror_zeropivot_row;
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
143: /*@
144: MatFactorGetError - gets the error code from a factorization
146: Logically Collective
148: Input Parameter:
149: . mat - the factored matrix
151: Output Parameter:
152: . err - the error code
154: Level: advanced
156: Note:
157: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
159: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
160: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
161: @*/
162: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
163: {
164: PetscFunctionBegin;
166: PetscAssertPointer(err, 2);
167: *err = mat->factorerrortype;
168: PetscFunctionReturn(PETSC_SUCCESS);
169: }
171: /*@
172: MatFactorClearError - clears the error code in a factorization
174: Logically Collective
176: Input Parameter:
177: . mat - the factored matrix
179: Level: developer
181: Note:
182: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
184: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
185: `MatGetErrorCode()`, `MatFactorError`
186: @*/
187: PetscErrorCode MatFactorClearError(Mat mat)
188: {
189: PetscFunctionBegin;
191: mat->factorerrortype = MAT_FACTOR_NOERROR;
192: mat->factorerror_zeropivot_value = 0.0;
193: mat->factorerror_zeropivot_row = 0;
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
197: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
198: {
199: Vec r, l;
200: const PetscScalar *al;
201: PetscInt i, nz, gnz, N, n, st;
203: PetscFunctionBegin;
204: PetscCall(MatCreateVecs(mat, &r, &l));
205: if (!cols) { /* nonzero rows */
206: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
207: PetscCall(MatGetSize(mat, &N, NULL));
208: PetscCall(MatGetLocalSize(mat, &n, NULL));
209: PetscCall(VecSet(l, 0.0));
210: PetscCall(VecSetRandom(r, NULL));
211: PetscCall(MatMult(mat, r, l));
212: PetscCall(VecGetArrayRead(l, &al));
213: } else { /* nonzero columns */
214: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
215: PetscCall(MatGetSize(mat, NULL, &N));
216: PetscCall(MatGetLocalSize(mat, NULL, &n));
217: PetscCall(VecSet(r, 0.0));
218: PetscCall(VecSetRandom(l, NULL));
219: PetscCall(MatMultTranspose(mat, l, r));
220: PetscCall(VecGetArrayRead(r, &al));
221: }
222: if (tol <= 0.0) {
223: for (i = 0, nz = 0; i < n; i++)
224: if (al[i] != 0.0) nz++;
225: } else {
226: for (i = 0, nz = 0; i < n; i++)
227: if (PetscAbsScalar(al[i]) > tol) nz++;
228: }
229: PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
230: if (gnz != N) {
231: PetscInt *nzr;
232: PetscCall(PetscMalloc1(nz, &nzr));
233: if (nz) {
234: if (tol < 0) {
235: for (i = 0, nz = 0; i < n; i++)
236: if (al[i] != 0.0) nzr[nz++] = i + st;
237: } else {
238: for (i = 0, nz = 0; i < n; i++)
239: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
240: }
241: }
242: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
243: } else *nonzero = NULL;
244: if (!cols) { /* nonzero rows */
245: PetscCall(VecRestoreArrayRead(l, &al));
246: } else {
247: PetscCall(VecRestoreArrayRead(r, &al));
248: }
249: PetscCall(VecDestroy(&l));
250: PetscCall(VecDestroy(&r));
251: PetscFunctionReturn(PETSC_SUCCESS);
252: }
254: /*@
255: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
257: Input Parameter:
258: . mat - the matrix
260: Output Parameter:
261: . keptrows - the rows that are not completely zero
263: Level: intermediate
265: Note:
266: `keptrows` is set to `NULL` if all rows are nonzero.
268: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
269: @*/
270: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
271: {
272: PetscFunctionBegin;
275: PetscAssertPointer(keptrows, 2);
276: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
277: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
278: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
279: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
280: PetscFunctionReturn(PETSC_SUCCESS);
281: }
283: /*@
284: MatFindZeroRows - Locate all rows that are completely zero in the matrix
286: Input Parameter:
287: . mat - the matrix
289: Output Parameter:
290: . zerorows - the rows that are completely zero
292: Level: intermediate
294: Note:
295: `zerorows` is set to `NULL` if no rows are zero.
297: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
298: @*/
299: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
300: {
301: IS keptrows;
302: PetscInt m, n;
304: PetscFunctionBegin;
307: PetscAssertPointer(zerorows, 2);
308: PetscCall(MatFindNonzeroRows(mat, &keptrows));
309: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
310: In keeping with this convention, we set zerorows to NULL if there are no zero
311: rows. */
312: if (keptrows == NULL) {
313: *zerorows = NULL;
314: } else {
315: PetscCall(MatGetOwnershipRange(mat, &m, &n));
316: PetscCall(ISComplement(keptrows, m, n, zerorows));
317: PetscCall(ISDestroy(&keptrows));
318: }
319: PetscFunctionReturn(PETSC_SUCCESS);
320: }
322: /*@
323: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
325: Not Collective
327: Input Parameter:
328: . A - the matrix
330: Output Parameter:
331: . a - the diagonal part (which is a SEQUENTIAL matrix)
333: Level: advanced
335: Notes:
336: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
338: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
340: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
341: @*/
342: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
343: {
344: PetscFunctionBegin;
347: PetscAssertPointer(a, 2);
348: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
349: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
350: else {
351: PetscMPIInt size;
353: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
354: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
355: *a = A;
356: }
357: PetscFunctionReturn(PETSC_SUCCESS);
358: }
360: /*@
361: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
363: Collective
365: Input Parameter:
366: . mat - the matrix
368: Output Parameter:
369: . trace - the sum of the diagonal entries
371: Level: advanced
373: .seealso: [](ch_matrices), `Mat`
374: @*/
375: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
376: {
377: Vec diag;
379: PetscFunctionBegin;
381: PetscAssertPointer(trace, 2);
382: PetscCall(MatCreateVecs(mat, &diag, NULL));
383: PetscCall(MatGetDiagonal(mat, diag));
384: PetscCall(VecSum(diag, trace));
385: PetscCall(VecDestroy(&diag));
386: PetscFunctionReturn(PETSC_SUCCESS);
387: }
389: /*@
390: MatRealPart - Zeros out the imaginary part of the matrix
392: Logically Collective
394: Input Parameter:
395: . mat - the matrix
397: Level: advanced
399: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
400: @*/
401: PetscErrorCode MatRealPart(Mat mat)
402: {
403: PetscFunctionBegin;
406: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
407: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
408: MatCheckPreallocated(mat, 1);
409: PetscUseTypeMethod(mat, realpart);
410: PetscFunctionReturn(PETSC_SUCCESS);
411: }
413: /*@C
414: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
416: Collective
418: Input Parameter:
419: . mat - the matrix
421: Output Parameters:
422: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
423: - ghosts - the global indices of the ghost points
425: Level: advanced
427: Note:
428: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
430: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
431: @*/
432: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
433: {
434: PetscFunctionBegin;
437: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
438: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
439: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
440: else {
441: if (nghosts) *nghosts = 0;
442: if (ghosts) *ghosts = NULL;
443: }
444: PetscFunctionReturn(PETSC_SUCCESS);
445: }
447: /*@
448: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
450: Logically Collective
452: Input Parameter:
453: . mat - the matrix
455: Level: advanced
457: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
458: @*/
459: PetscErrorCode MatImaginaryPart(Mat mat)
460: {
461: PetscFunctionBegin;
464: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
465: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
466: MatCheckPreallocated(mat, 1);
467: PetscUseTypeMethod(mat, imaginarypart);
468: PetscFunctionReturn(PETSC_SUCCESS);
469: }
471: /*@
472: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
474: Not Collective
476: Input Parameter:
477: . mat - the matrix
479: Output Parameters:
480: + missing - is any diagonal entry missing
481: - dd - first diagonal entry that is missing (optional) on this process
483: Level: advanced
485: Note:
486: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
488: .seealso: [](ch_matrices), `Mat`
489: @*/
490: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
491: {
492: PetscFunctionBegin;
495: PetscAssertPointer(missing, 2);
496: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
497: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
498: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
499: PetscFunctionReturn(PETSC_SUCCESS);
500: }
502: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
503: /*@C
504: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
505: for each row that you get to ensure that your application does
506: not bleed memory.
508: Not Collective
510: Input Parameters:
511: + mat - the matrix
512: - row - the row to get
514: Output Parameters:
515: + ncols - if not `NULL`, the number of nonzeros in `row`
516: . cols - if not `NULL`, the column numbers
517: - vals - if not `NULL`, the numerical values
519: Level: advanced
521: Notes:
522: This routine is provided for people who need to have direct access
523: to the structure of a matrix. We hope that we provide enough
524: high-level matrix routines that few users will need it.
526: `MatGetRow()` always returns 0-based column indices, regardless of
527: whether the internal representation is 0-based (default) or 1-based.
529: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
530: not wish to extract these quantities.
532: The user can only examine the values extracted with `MatGetRow()`;
533: the values CANNOT be altered. To change the matrix entries, one
534: must use `MatSetValues()`.
536: You can only have one call to `MatGetRow()` outstanding for a particular
537: matrix at a time, per processor. `MatGetRow()` can only obtain rows
538: associated with the given processor, it cannot get rows from the
539: other processors; for that we suggest using `MatCreateSubMatrices()`, then
540: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
541: is in the global number of rows.
543: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
545: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
547: Fortran Note:
548: The calling sequence is
549: .vb
550: MatGetRow(matrix,row,ncols,cols,values,ierr)
551: Mat matrix (input)
552: integer row (input)
553: integer ncols (output)
554: integer cols(maxcols) (output)
555: double precision (or double complex) values(maxcols) output
556: .ve
557: where maxcols >= maximum nonzeros in any row of the matrix.
559: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
560: @*/
561: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
562: {
563: PetscInt incols;
565: PetscFunctionBegin;
568: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
569: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
570: MatCheckPreallocated(mat, 1);
571: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
572: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
573: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
574: if (ncols) *ncols = incols;
575: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
576: PetscFunctionReturn(PETSC_SUCCESS);
577: }
579: /*@
580: MatConjugate - replaces the matrix values with their complex conjugates
582: Logically Collective
584: Input Parameter:
585: . mat - the matrix
587: Level: advanced
589: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
590: @*/
591: PetscErrorCode MatConjugate(Mat mat)
592: {
593: PetscFunctionBegin;
595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
596: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
597: PetscUseTypeMethod(mat, conjugate);
598: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
599: }
600: PetscFunctionReturn(PETSC_SUCCESS);
601: }
603: /*@C
604: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
606: Not Collective
608: Input Parameters:
609: + mat - the matrix
610: . row - the row to get
611: . ncols - the number of nonzeros
612: . cols - the columns of the nonzeros
613: - vals - if nonzero the column values
615: Level: advanced
617: Notes:
618: This routine should be called after you have finished examining the entries.
620: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
621: us of the array after it has been restored. If you pass `NULL`, it will
622: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
624: Fortran Notes:
625: The calling sequence is
626: .vb
627: MatRestoreRow(matrix,row,ncols,cols,values,ierr)
628: Mat matrix (input)
629: integer row (input)
630: integer ncols (output)
631: integer cols(maxcols) (output)
632: double precision (or double complex) values(maxcols) output
633: .ve
634: Where maxcols >= maximum nonzeros in any row of the matrix.
636: In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()`
637: before another call to `MatGetRow()` can be made.
639: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
640: @*/
641: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
642: {
643: PetscFunctionBegin;
645: if (ncols) PetscAssertPointer(ncols, 3);
646: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
647: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
648: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
649: if (ncols) *ncols = 0;
650: if (cols) *cols = NULL;
651: if (vals) *vals = NULL;
652: PetscFunctionReturn(PETSC_SUCCESS);
653: }
655: /*@
656: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
657: You should call `MatRestoreRowUpperTriangular()` after calling` MatGetRow()` and `MatRestoreRow()` to disable the flag.
659: Not Collective
661: Input Parameter:
662: . mat - the matrix
664: Level: advanced
666: Note:
667: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
669: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
670: @*/
671: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
672: {
673: PetscFunctionBegin;
676: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
677: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
678: MatCheckPreallocated(mat, 1);
679: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
680: PetscUseTypeMethod(mat, getrowuppertriangular);
681: PetscFunctionReturn(PETSC_SUCCESS);
682: }
684: /*@
685: MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
687: Not Collective
689: Input Parameter:
690: . mat - the matrix
692: Level: advanced
694: Note:
695: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
697: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
698: @*/
699: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
700: {
701: PetscFunctionBegin;
704: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
705: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
706: MatCheckPreallocated(mat, 1);
707: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
708: PetscUseTypeMethod(mat, restorerowuppertriangular);
709: PetscFunctionReturn(PETSC_SUCCESS);
710: }
712: /*@C
713: MatSetOptionsPrefix - Sets the prefix used for searching for all
714: `Mat` options in the database.
716: Logically Collective
718: Input Parameters:
719: + A - the matrix
720: - prefix - the prefix to prepend to all option names
722: Level: advanced
724: Notes:
725: A hyphen (-) must NOT be given at the beginning of the prefix name.
726: The first character of all runtime options is AUTOMATICALLY the hyphen.
728: This is NOT used for options for the factorization of the matrix. Normally the
729: prefix is automatically passed in from the PC calling the factorization. To set
730: it directly use `MatSetOptionsPrefixFactor()`
732: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
733: @*/
734: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
735: {
736: PetscFunctionBegin;
738: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
739: PetscFunctionReturn(PETSC_SUCCESS);
740: }
742: /*@C
743: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
744: for matrices created with `MatGetFactor()`
746: Logically Collective
748: Input Parameters:
749: + A - the matrix
750: - prefix - the prefix to prepend to all option names for the factored matrix
752: Level: developer
754: Notes:
755: A hyphen (-) must NOT be given at the beginning of the prefix name.
756: The first character of all runtime options is AUTOMATICALLY the hyphen.
758: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
759: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
761: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
762: @*/
763: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
764: {
765: PetscFunctionBegin;
767: if (prefix) {
768: PetscAssertPointer(prefix, 2);
769: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
770: if (prefix != A->factorprefix) {
771: PetscCall(PetscFree(A->factorprefix));
772: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
773: }
774: } else PetscCall(PetscFree(A->factorprefix));
775: PetscFunctionReturn(PETSC_SUCCESS);
776: }
778: /*@C
779: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
780: for matrices created with `MatGetFactor()`
782: Logically Collective
784: Input Parameters:
785: + A - the matrix
786: - prefix - the prefix to prepend to all option names for the factored matrix
788: Level: developer
790: Notes:
791: A hyphen (-) must NOT be given at the beginning of the prefix name.
792: The first character of all runtime options is AUTOMATICALLY the hyphen.
794: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
795: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
797: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
798: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
799: `MatSetOptionsPrefix()`
800: @*/
801: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
802: {
803: size_t len1, len2, new_len;
805: PetscFunctionBegin;
807: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
808: if (!A->factorprefix) {
809: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
810: PetscFunctionReturn(PETSC_SUCCESS);
811: }
812: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
814: PetscCall(PetscStrlen(A->factorprefix, &len1));
815: PetscCall(PetscStrlen(prefix, &len2));
816: new_len = len1 + len2 + 1;
817: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
818: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
819: PetscFunctionReturn(PETSC_SUCCESS);
820: }
822: /*@C
823: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
824: matrix options in the database.
826: Logically Collective
828: Input Parameters:
829: + A - the matrix
830: - prefix - the prefix to prepend to all option names
832: Level: advanced
834: Note:
835: A hyphen (-) must NOT be given at the beginning of the prefix name.
836: The first character of all runtime options is AUTOMATICALLY the hyphen.
838: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
839: @*/
840: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
841: {
842: PetscFunctionBegin;
844: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
845: PetscFunctionReturn(PETSC_SUCCESS);
846: }
848: /*@C
849: MatGetOptionsPrefix - Gets the prefix used for searching for all
850: matrix options in the database.
852: Not Collective
854: Input Parameter:
855: . A - the matrix
857: Output Parameter:
858: . prefix - pointer to the prefix string used
860: Level: advanced
862: Fortran Note:
863: The user should pass in a string `prefix` of
864: sufficient length to hold the prefix.
866: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
867: @*/
868: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
869: {
870: PetscFunctionBegin;
872: PetscAssertPointer(prefix, 2);
873: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
874: PetscFunctionReturn(PETSC_SUCCESS);
875: }
877: /*@
878: MatResetPreallocation - Reset matrix to use the original nonzero pattern provided by the user.
880: Collective
882: Input Parameter:
883: . A - the matrix
885: Level: beginner
887: Notes:
888: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
890: Users can reset the preallocation to access the original memory.
892: Currently only supported for `MATAIJ` matrices.
894: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
895: @*/
896: PetscErrorCode MatResetPreallocation(Mat A)
897: {
898: PetscFunctionBegin;
901: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
902: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
903: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
904: PetscFunctionReturn(PETSC_SUCCESS);
905: }
907: /*@
908: MatSetUp - Sets up the internal matrix data structures for later use.
910: Collective
912: Input Parameter:
913: . A - the matrix
915: Level: intermediate
917: Notes:
918: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
919: setting values in the matrix.
921: This routine is called internally by other matrix functions when needed so rarely needs to be called by users
923: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
924: @*/
925: PetscErrorCode MatSetUp(Mat A)
926: {
927: PetscFunctionBegin;
929: if (!((PetscObject)A)->type_name) {
930: PetscMPIInt size;
932: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
933: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
934: }
935: if (!A->preallocated) PetscTryTypeMethod(A, setup);
936: PetscCall(PetscLayoutSetUp(A->rmap));
937: PetscCall(PetscLayoutSetUp(A->cmap));
938: A->preallocated = PETSC_TRUE;
939: PetscFunctionReturn(PETSC_SUCCESS);
940: }
942: #if defined(PETSC_HAVE_SAWS)
943: #include <petscviewersaws.h>
944: #endif
946: /*
947: If threadsafety is on extraneous matrices may be printed
949: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
950: */
951: #if !defined(PETSC_HAVE_THREADSAFETY)
952: static PetscInt insidematview = 0;
953: #endif
955: /*@C
956: MatViewFromOptions - View properties of the matrix based on options set in the options database
958: Collective
960: Input Parameters:
961: + A - the matrix
962: . obj - optional additional object that provides the options prefix to use
963: - name - command line option
965: Options Database Key:
966: . -mat_view [viewertype]:... - the viewer and its options
968: Level: intermediate
970: Note:
971: .vb
972: If no value is provided ascii:stdout is used
973: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
974: for example ascii::ascii_info prints just the information about the object not all details
975: unless :append is given filename opens in write mode, overwriting what was already there
976: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
977: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
978: socket[:port] defaults to the standard output port
979: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
980: .ve
982: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
983: @*/
984: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
985: {
986: PetscFunctionBegin;
988: #if !defined(PETSC_HAVE_THREADSAFETY)
989: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
990: #endif
991: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
992: PetscFunctionReturn(PETSC_SUCCESS);
993: }
995: /*@C
996: MatView - display information about a matrix in a variety ways
998: Collective on viewer
1000: Input Parameters:
1001: + mat - the matrix
1002: - viewer - visualization context
1004: Options Database Keys:
1005: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1006: . -mat_view ::ascii_info_detail - Prints more detailed info
1007: . -mat_view - Prints matrix in ASCII format
1008: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1009: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1010: . -display <name> - Sets display name (default is host)
1011: . -draw_pause <sec> - Sets number of seconds to pause after display
1012: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1013: . -viewer_socket_machine <machine> - -
1014: . -viewer_socket_port <port> - -
1015: . -mat_view binary - save matrix to file in binary format
1016: - -viewer_binary_filename <name> - -
1018: Level: beginner
1020: Notes:
1021: The available visualization contexts include
1022: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1023: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1024: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1025: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1027: The user can open alternative visualization contexts with
1028: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1029: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1030: specified file; corresponding input uses `MatLoad()`
1031: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1032: an X window display
1033: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1034: Currently only the `MATSEQDENSE` and `MATAIJ`
1035: matrix types support the Socket viewer.
1037: The user can call `PetscViewerPushFormat()` to specify the output
1038: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1039: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1040: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1041: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1042: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1043: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1044: format common among all matrix types
1045: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1046: format (which is in many cases the same as the default)
1047: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1048: size and structure (not the matrix entries)
1049: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1050: the matrix structure
1052: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
1053: the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
1055: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1057: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1058: viewer is used.
1060: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1061: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1063: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1064: and then use the following mouse functions.
1065: .vb
1066: left mouse: zoom in
1067: middle mouse: zoom out
1068: right mouse: continue with the simulation
1069: .ve
1071: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1072: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1073: @*/
1074: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1075: {
1076: PetscInt rows, cols, rbs, cbs;
1077: PetscBool isascii, isstring, issaws;
1078: PetscViewerFormat format;
1079: PetscMPIInt size;
1081: PetscFunctionBegin;
1084: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1087: PetscCall(PetscViewerGetFormat(viewer, &format));
1088: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1089: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1091: #if !defined(PETSC_HAVE_THREADSAFETY)
1092: insidematview++;
1093: #endif
1094: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1095: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1096: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1097: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1099: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1100: if (isascii) {
1101: if (!mat->preallocated) {
1102: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1103: #if !defined(PETSC_HAVE_THREADSAFETY)
1104: insidematview--;
1105: #endif
1106: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1107: PetscFunctionReturn(PETSC_SUCCESS);
1108: }
1109: if (!mat->assembled) {
1110: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1111: #if !defined(PETSC_HAVE_THREADSAFETY)
1112: insidematview--;
1113: #endif
1114: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1115: PetscFunctionReturn(PETSC_SUCCESS);
1116: }
1117: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1118: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1119: MatNullSpace nullsp, transnullsp;
1121: PetscCall(PetscViewerASCIIPushTab(viewer));
1122: PetscCall(MatGetSize(mat, &rows, &cols));
1123: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1124: if (rbs != 1 || cbs != 1) {
1125: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1126: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1127: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1128: if (mat->factortype) {
1129: MatSolverType solver;
1130: PetscCall(MatFactorGetSolverType(mat, &solver));
1131: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1132: }
1133: if (mat->ops->getinfo) {
1134: MatInfo info;
1135: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1136: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1137: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1138: }
1139: PetscCall(MatGetNullSpace(mat, &nullsp));
1140: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1141: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1142: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1143: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1144: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1145: PetscCall(PetscViewerASCIIPushTab(viewer));
1146: PetscCall(MatProductView(mat, viewer));
1147: PetscCall(PetscViewerASCIIPopTab(viewer));
1148: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1149: IS tmp;
1151: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1152: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1153: PetscCall(PetscViewerASCIIPushTab(viewer));
1154: PetscCall(ISView(tmp, viewer));
1155: PetscCall(PetscViewerASCIIPopTab(viewer));
1156: PetscCall(ISDestroy(&tmp));
1157: }
1158: }
1159: } else if (issaws) {
1160: #if defined(PETSC_HAVE_SAWS)
1161: PetscMPIInt rank;
1163: PetscCall(PetscObjectName((PetscObject)mat));
1164: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1165: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1166: #endif
1167: } else if (isstring) {
1168: const char *type;
1169: PetscCall(MatGetType(mat, &type));
1170: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1171: PetscTryTypeMethod(mat, view, viewer);
1172: }
1173: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1174: PetscCall(PetscViewerASCIIPushTab(viewer));
1175: PetscUseTypeMethod(mat, viewnative, viewer);
1176: PetscCall(PetscViewerASCIIPopTab(viewer));
1177: } else if (mat->ops->view) {
1178: PetscCall(PetscViewerASCIIPushTab(viewer));
1179: PetscUseTypeMethod(mat, view, viewer);
1180: PetscCall(PetscViewerASCIIPopTab(viewer));
1181: }
1182: if (isascii) {
1183: PetscCall(PetscViewerGetFormat(viewer, &format));
1184: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1185: }
1186: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1187: #if !defined(PETSC_HAVE_THREADSAFETY)
1188: insidematview--;
1189: #endif
1190: PetscFunctionReturn(PETSC_SUCCESS);
1191: }
1193: #if defined(PETSC_USE_DEBUG)
1194: #include <../src/sys/totalview/tv_data_display.h>
1195: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1196: {
1197: TV_add_row("Local rows", "int", &mat->rmap->n);
1198: TV_add_row("Local columns", "int", &mat->cmap->n);
1199: TV_add_row("Global rows", "int", &mat->rmap->N);
1200: TV_add_row("Global columns", "int", &mat->cmap->N);
1201: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1202: return TV_format_OK;
1203: }
1204: #endif
1206: /*@C
1207: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1208: with `MatView()`. The matrix format is determined from the options database.
1209: Generates a parallel MPI matrix if the communicator has more than one
1210: processor. The default matrix type is `MATAIJ`.
1212: Collective
1214: Input Parameters:
1215: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1216: or some related function before a call to `MatLoad()`
1217: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1219: Options Database Key:
1220: . -matload_block_size <bs> - set block size
1222: Level: beginner
1224: Notes:
1225: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1226: `Mat` before calling this routine if you wish to set it from the options database.
1228: `MatLoad()` automatically loads into the options database any options
1229: given in the file filename.info where filename is the name of the file
1230: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1231: file will be ignored if you use the -viewer_binary_skip_info option.
1233: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1234: sets the default matrix type AIJ and sets the local and global sizes.
1235: If type and/or size is already set, then the same are used.
1237: In parallel, each processor can load a subset of rows (or the
1238: entire matrix). This routine is especially useful when a large
1239: matrix is stored on disk and only part of it is desired on each
1240: processor. For example, a parallel solver may access only some of
1241: the rows from each processor. The algorithm used here reads
1242: relatively small blocks of data rather than reading the entire
1243: matrix and then subsetting it.
1245: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1246: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1247: or the sequence like
1248: .vb
1249: `PetscViewer` v;
1250: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1251: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1252: `PetscViewerSetFromOptions`(v);
1253: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1254: `PetscViewerFileSetName`(v,"datafile");
1255: .ve
1256: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1257: $ -viewer_type {binary, hdf5}
1259: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1260: and src/mat/tutorials/ex10.c with the second approach.
1262: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1263: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1264: Multiple objects, both matrices and vectors, can be stored within the same file.
1265: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1267: Most users should not need to know the details of the binary storage
1268: format, since `MatLoad()` and `MatView()` completely hide these details.
1269: But for anyone who is interested, the standard binary matrix storage
1270: format is
1272: .vb
1273: PetscInt MAT_FILE_CLASSID
1274: PetscInt number of rows
1275: PetscInt number of columns
1276: PetscInt total number of nonzeros
1277: PetscInt *number nonzeros in each row
1278: PetscInt *column indices of all nonzeros (starting index is zero)
1279: PetscScalar *values of all nonzeros
1280: .ve
1281: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1282: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1283: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1285: PETSc automatically does the byte swapping for
1286: machines that store the bytes reversed. Thus if you write your own binary
1287: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1288: and `PetscBinaryWrite()` to see how this may be done.
1290: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1291: Each processor's chunk is loaded independently by its owning MPI process.
1292: Multiple objects, both matrices and vectors, can be stored within the same file.
1293: They are looked up by their PetscObject name.
1295: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1296: by default the same structure and naming of the AIJ arrays and column count
1297: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1298: $ save example.mat A b -v7.3
1299: can be directly read by this routine (see Reference 1 for details).
1301: Depending on your MATLAB version, this format might be a default,
1302: otherwise you can set it as default in Preferences.
1304: Unless -nocompression flag is used to save the file in MATLAB,
1305: PETSc must be configured with ZLIB package.
1307: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1309: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1311: Corresponding `MatView()` is not yet implemented.
1313: The loaded matrix is actually a transpose of the original one in MATLAB,
1314: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1315: With this format, matrix is automatically transposed by PETSc,
1316: unless the matrix is marked as SPD or symmetric
1317: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1319: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1321: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1322: @*/
1323: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1324: {
1325: PetscBool flg;
1327: PetscFunctionBegin;
1331: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1333: flg = PETSC_FALSE;
1334: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1335: if (flg) {
1336: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1337: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1338: }
1339: flg = PETSC_FALSE;
1340: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1341: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1343: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1344: PetscUseTypeMethod(mat, load, viewer);
1345: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1346: PetscFunctionReturn(PETSC_SUCCESS);
1347: }
1349: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1350: {
1351: Mat_Redundant *redund = *redundant;
1353: PetscFunctionBegin;
1354: if (redund) {
1355: if (redund->matseq) { /* via MatCreateSubMatrices() */
1356: PetscCall(ISDestroy(&redund->isrow));
1357: PetscCall(ISDestroy(&redund->iscol));
1358: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1359: } else {
1360: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1361: PetscCall(PetscFree(redund->sbuf_j));
1362: PetscCall(PetscFree(redund->sbuf_a));
1363: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1364: PetscCall(PetscFree(redund->rbuf_j[i]));
1365: PetscCall(PetscFree(redund->rbuf_a[i]));
1366: }
1367: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1368: }
1370: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1371: PetscCall(PetscFree(redund));
1372: }
1373: PetscFunctionReturn(PETSC_SUCCESS);
1374: }
1376: /*@C
1377: MatDestroy - Frees space taken by a matrix.
1379: Collective
1381: Input Parameter:
1382: . A - the matrix
1384: Level: beginner
1386: Developer Note:
1387: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1388: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1389: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1390: if changes are needed here.
1392: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1393: @*/
1394: PetscErrorCode MatDestroy(Mat *A)
1395: {
1396: PetscFunctionBegin;
1397: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1399: if (--((PetscObject)*A)->refct > 0) {
1400: *A = NULL;
1401: PetscFunctionReturn(PETSC_SUCCESS);
1402: }
1404: /* if memory was published with SAWs then destroy it */
1405: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1406: PetscTryTypeMethod(*A, destroy);
1408: PetscCall(PetscFree((*A)->factorprefix));
1409: PetscCall(PetscFree((*A)->defaultvectype));
1410: PetscCall(PetscFree((*A)->defaultrandtype));
1411: PetscCall(PetscFree((*A)->bsizes));
1412: PetscCall(PetscFree((*A)->solvertype));
1413: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1414: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1415: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1416: PetscCall(MatProductClear(*A));
1417: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1418: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1419: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1420: PetscCall(MatDestroy(&(*A)->schur));
1421: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1422: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1423: PetscCall(PetscHeaderDestroy(A));
1424: PetscFunctionReturn(PETSC_SUCCESS);
1425: }
1427: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1428: /*@C
1429: MatSetValues - Inserts or adds a block of values into a matrix.
1430: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1431: MUST be called after all calls to `MatSetValues()` have been completed.
1433: Not Collective
1435: Input Parameters:
1436: + mat - the matrix
1437: . v - a logically two-dimensional array of values
1438: . m - the number of rows
1439: . idxm - the global indices of the rows
1440: . n - the number of columns
1441: . idxn - the global indices of the columns
1442: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1444: Level: beginner
1446: Notes:
1447: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1449: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1450: options cannot be mixed without intervening calls to the assembly
1451: routines.
1453: `MatSetValues()` uses 0-based row and column numbers in Fortran
1454: as well as in C.
1456: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1457: simply ignored. This allows easily inserting element stiffness matrices
1458: with homogeneous Dirichlet boundary conditions that you don't want represented
1459: in the matrix.
1461: Efficiency Alert:
1462: The routine `MatSetValuesBlocked()` may offer much better efficiency
1463: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1465: Developer Note:
1466: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1467: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1469: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1470: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1471: @*/
1472: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1473: {
1474: PetscFunctionBeginHot;
1477: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1478: PetscAssertPointer(idxm, 3);
1479: PetscAssertPointer(idxn, 5);
1480: MatCheckPreallocated(mat, 1);
1482: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1483: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1485: if (PetscDefined(USE_DEBUG)) {
1486: PetscInt i, j;
1488: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1489: if (v) {
1490: for (i = 0; i < m; i++) {
1491: for (j = 0; j < n; j++) {
1492: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1493: #if defined(PETSC_USE_COMPLEX)
1494: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1495: #else
1496: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1497: #endif
1498: }
1499: }
1500: }
1501: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1502: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1503: }
1505: if (mat->assembled) {
1506: mat->was_assembled = PETSC_TRUE;
1507: mat->assembled = PETSC_FALSE;
1508: }
1509: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1510: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1511: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1512: PetscFunctionReturn(PETSC_SUCCESS);
1513: }
1515: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1516: /*@C
1517: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1518: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1519: MUST be called after all calls to `MatSetValues()` have been completed.
1521: Not Collective
1523: Input Parameters:
1524: + mat - the matrix
1525: . v - a logically two-dimensional array of values
1526: . ism - the rows to provide
1527: . isn - the columns to provide
1528: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1530: Level: beginner
1532: Notes:
1533: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1535: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1536: options cannot be mixed without intervening calls to the assembly
1537: routines.
1539: `MatSetValues()` uses 0-based row and column numbers in Fortran
1540: as well as in C.
1542: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1543: simply ignored. This allows easily inserting element stiffness matrices
1544: with homogeneous Dirichlet boundary conditions that you don't want represented
1545: in the matrix.
1547: Efficiency Alert:
1548: The routine `MatSetValuesBlocked()` may offer much better efficiency
1549: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1551: This is currently not optimized for any particular `ISType`
1553: Developer Note:
1554: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1555: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1557: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1558: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1559: @*/
1560: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1561: {
1562: PetscInt m, n;
1563: const PetscInt *rows, *cols;
1565: PetscFunctionBeginHot;
1567: PetscCall(ISGetIndices(ism, &rows));
1568: PetscCall(ISGetIndices(isn, &cols));
1569: PetscCall(ISGetLocalSize(ism, &m));
1570: PetscCall(ISGetLocalSize(isn, &n));
1571: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1572: PetscCall(ISRestoreIndices(ism, &rows));
1573: PetscCall(ISRestoreIndices(isn, &cols));
1574: PetscFunctionReturn(PETSC_SUCCESS);
1575: }
1577: /*@
1578: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1579: values into a matrix
1581: Not Collective
1583: Input Parameters:
1584: + mat - the matrix
1585: . row - the (block) row to set
1586: - v - a logically two-dimensional array of values
1588: Level: intermediate
1590: Notes:
1591: The values, `v`, are column-oriented (for the block version) and sorted
1593: All the nonzero values in `row` must be provided
1595: The matrix must have previously had its column indices set, likely by having been assembled.
1597: `row` must belong to this MPI process
1599: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1600: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1601: @*/
1602: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1603: {
1604: PetscInt globalrow;
1606: PetscFunctionBegin;
1609: PetscAssertPointer(v, 3);
1610: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1611: PetscCall(MatSetValuesRow(mat, globalrow, v));
1612: PetscFunctionReturn(PETSC_SUCCESS);
1613: }
1615: /*@
1616: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1617: values into a matrix
1619: Not Collective
1621: Input Parameters:
1622: + mat - the matrix
1623: . row - the (block) row to set
1624: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1626: Level: advanced
1628: Notes:
1629: The values, `v`, are column-oriented for the block version.
1631: All the nonzeros in `row` must be provided
1633: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1635: `row` must belong to this process
1637: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1638: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1639: @*/
1640: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1641: {
1642: PetscFunctionBeginHot;
1645: MatCheckPreallocated(mat, 1);
1646: PetscAssertPointer(v, 3);
1647: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1648: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1649: mat->insertmode = INSERT_VALUES;
1651: if (mat->assembled) {
1652: mat->was_assembled = PETSC_TRUE;
1653: mat->assembled = PETSC_FALSE;
1654: }
1655: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1656: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1657: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1658: PetscFunctionReturn(PETSC_SUCCESS);
1659: }
1661: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1662: /*@
1663: MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1664: Using structured grid indexing
1666: Not Collective
1668: Input Parameters:
1669: + mat - the matrix
1670: . m - number of rows being entered
1671: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1672: . n - number of columns being entered
1673: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1674: . v - a logically two-dimensional array of values
1675: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1677: Level: beginner
1679: Notes:
1680: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1682: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1683: options cannot be mixed without intervening calls to the assembly
1684: routines.
1686: The grid coordinates are across the entire grid, not just the local portion
1688: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1689: as well as in C.
1691: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1693: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1694: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1696: The columns and rows in the stencil passed in MUST be contained within the
1697: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1698: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1699: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1700: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1702: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
1703: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
1704: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
1705: `DM_BOUNDARY_PERIODIC` boundary type.
1707: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
1708: a single value per point) you can skip filling those indices.
1710: Inspired by the structured grid interface to the HYPRE package
1711: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1713: Efficiency Alert:
1714: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1715: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1717: Fortran Note:
1718: `idxm` and `idxn` should be declared as
1719: $ MatStencil idxm(4,m),idxn(4,n)
1720: and the values inserted using
1721: .vb
1722: idxm(MatStencil_i,1) = i
1723: idxm(MatStencil_j,1) = j
1724: idxm(MatStencil_k,1) = k
1725: idxm(MatStencil_c,1) = c
1726: etc
1727: .ve
1729: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1730: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1731: @*/
1732: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1733: {
1734: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1735: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1736: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1738: PetscFunctionBegin;
1739: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1742: PetscAssertPointer(idxm, 3);
1743: PetscAssertPointer(idxn, 5);
1745: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1746: jdxm = buf;
1747: jdxn = buf + m;
1748: } else {
1749: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1750: jdxm = bufm;
1751: jdxn = bufn;
1752: }
1753: for (i = 0; i < m; i++) {
1754: for (j = 0; j < 3 - sdim; j++) dxm++;
1755: tmp = *dxm++ - starts[0];
1756: for (j = 0; j < dim - 1; j++) {
1757: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1758: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1759: }
1760: if (mat->stencil.noc) dxm++;
1761: jdxm[i] = tmp;
1762: }
1763: for (i = 0; i < n; i++) {
1764: for (j = 0; j < 3 - sdim; j++) dxn++;
1765: tmp = *dxn++ - starts[0];
1766: for (j = 0; j < dim - 1; j++) {
1767: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1768: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1769: }
1770: if (mat->stencil.noc) dxn++;
1771: jdxn[i] = tmp;
1772: }
1773: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1774: PetscCall(PetscFree2(bufm, bufn));
1775: PetscFunctionReturn(PETSC_SUCCESS);
1776: }
1778: /*@
1779: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1780: Using structured grid indexing
1782: Not Collective
1784: Input Parameters:
1785: + mat - the matrix
1786: . m - number of rows being entered
1787: . idxm - grid coordinates for matrix rows being entered
1788: . n - number of columns being entered
1789: . idxn - grid coordinates for matrix columns being entered
1790: . v - a logically two-dimensional array of values
1791: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1793: Level: beginner
1795: Notes:
1796: By default the values, `v`, are row-oriented and unsorted.
1797: See `MatSetOption()` for other options.
1799: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1800: options cannot be mixed without intervening calls to the assembly
1801: routines.
1803: The grid coordinates are across the entire grid, not just the local portion
1805: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1806: as well as in C.
1808: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1810: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1811: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1813: The columns and rows in the stencil passed in MUST be contained within the
1814: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1815: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1816: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1817: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1819: Negative indices may be passed in idxm and idxn, these rows and columns are
1820: simply ignored. This allows easily inserting element stiffness matrices
1821: with homogeneous Dirichlet boundary conditions that you don't want represented
1822: in the matrix.
1824: Inspired by the structured grid interface to the HYPRE package
1825: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1827: Fortran Note:
1828: `idxm` and `idxn` should be declared as
1829: $ MatStencil idxm(4,m),idxn(4,n)
1830: and the values inserted using
1831: .vb
1832: idxm(MatStencil_i,1) = i
1833: idxm(MatStencil_j,1) = j
1834: idxm(MatStencil_k,1) = k
1835: etc
1836: .ve
1838: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1839: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1840: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1841: @*/
1842: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1843: {
1844: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1845: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1846: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1848: PetscFunctionBegin;
1849: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1852: PetscAssertPointer(idxm, 3);
1853: PetscAssertPointer(idxn, 5);
1854: PetscAssertPointer(v, 6);
1856: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1857: jdxm = buf;
1858: jdxn = buf + m;
1859: } else {
1860: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1861: jdxm = bufm;
1862: jdxn = bufn;
1863: }
1864: for (i = 0; i < m; i++) {
1865: for (j = 0; j < 3 - sdim; j++) dxm++;
1866: tmp = *dxm++ - starts[0];
1867: for (j = 0; j < sdim - 1; j++) {
1868: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1869: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1870: }
1871: dxm++;
1872: jdxm[i] = tmp;
1873: }
1874: for (i = 0; i < n; i++) {
1875: for (j = 0; j < 3 - sdim; j++) dxn++;
1876: tmp = *dxn++ - starts[0];
1877: for (j = 0; j < sdim - 1; j++) {
1878: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1879: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1880: }
1881: dxn++;
1882: jdxn[i] = tmp;
1883: }
1884: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1885: PetscCall(PetscFree2(bufm, bufn));
1886: PetscFunctionReturn(PETSC_SUCCESS);
1887: }
1889: /*@
1890: MatSetStencil - Sets the grid information for setting values into a matrix via
1891: `MatSetValuesStencil()`
1893: Not Collective
1895: Input Parameters:
1896: + mat - the matrix
1897: . dim - dimension of the grid 1, 2, or 3
1898: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1899: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1900: - dof - number of degrees of freedom per node
1902: Level: beginner
1904: Notes:
1905: Inspired by the structured grid interface to the HYPRE package
1906: (www.llnl.gov/CASC/hyper)
1908: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1909: user.
1911: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1912: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1913: @*/
1914: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1915: {
1916: PetscFunctionBegin;
1918: PetscAssertPointer(dims, 3);
1919: PetscAssertPointer(starts, 4);
1921: mat->stencil.dim = dim + (dof > 1);
1922: for (PetscInt i = 0; i < dim; i++) {
1923: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1924: mat->stencil.starts[i] = starts[dim - i - 1];
1925: }
1926: mat->stencil.dims[dim] = dof;
1927: mat->stencil.starts[dim] = 0;
1928: mat->stencil.noc = (PetscBool)(dof == 1);
1929: PetscFunctionReturn(PETSC_SUCCESS);
1930: }
1932: /*@C
1933: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1935: Not Collective
1937: Input Parameters:
1938: + mat - the matrix
1939: . v - a logically two-dimensional array of values
1940: . m - the number of block rows
1941: . idxm - the global block indices
1942: . n - the number of block columns
1943: . idxn - the global block indices
1944: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1946: Level: intermediate
1948: Notes:
1949: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1950: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1952: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1953: NOT the total number of rows/columns; for example, if the block size is 2 and
1954: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
1955: The values in `idxm` would be 1 2; that is the first index for each block divided by
1956: the block size.
1958: You must call `MatSetBlockSize()` when constructing this matrix (before
1959: preallocating it).
1961: By default the values, `v`, are row-oriented, so the layout of
1962: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1964: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1965: options cannot be mixed without intervening calls to the assembly
1966: routines.
1968: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1969: as well as in C.
1971: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1972: simply ignored. This allows easily inserting element stiffness matrices
1973: with homogeneous Dirichlet boundary conditions that you don't want represented
1974: in the matrix.
1976: Each time an entry is set within a sparse matrix via `MatSetValues()`,
1977: internal searching must be done to determine where to place the
1978: data in the matrix storage space. By instead inserting blocks of
1979: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
1980: reduced.
1982: Example:
1983: .vb
1984: Suppose m=n=2 and block size(bs) = 2 The array is
1986: 1 2 | 3 4
1987: 5 6 | 7 8
1988: - - - | - - -
1989: 9 10 | 11 12
1990: 13 14 | 15 16
1992: v[] should be passed in like
1993: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1995: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1996: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1997: .ve
1999: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2000: @*/
2001: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2002: {
2003: PetscFunctionBeginHot;
2006: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2007: PetscAssertPointer(idxm, 3);
2008: PetscAssertPointer(idxn, 5);
2009: MatCheckPreallocated(mat, 1);
2010: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2011: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2012: if (PetscDefined(USE_DEBUG)) {
2013: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2014: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2015: }
2016: if (PetscDefined(USE_DEBUG)) {
2017: PetscInt rbs, cbs, M, N, i;
2018: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2019: PetscCall(MatGetSize(mat, &M, &N));
2020: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2021: for (i = 0; i < n; i++)
2022: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2023: }
2024: if (mat->assembled) {
2025: mat->was_assembled = PETSC_TRUE;
2026: mat->assembled = PETSC_FALSE;
2027: }
2028: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2029: if (mat->ops->setvaluesblocked) {
2030: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2031: } else {
2032: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2033: PetscInt i, j, bs, cbs;
2035: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2036: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2037: iidxm = buf;
2038: iidxn = buf + m * bs;
2039: } else {
2040: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2041: iidxm = bufr;
2042: iidxn = bufc;
2043: }
2044: for (i = 0; i < m; i++) {
2045: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2046: }
2047: if (m != n || bs != cbs || idxm != idxn) {
2048: for (i = 0; i < n; i++) {
2049: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2050: }
2051: } else iidxn = iidxm;
2052: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2053: PetscCall(PetscFree2(bufr, bufc));
2054: }
2055: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2056: PetscFunctionReturn(PETSC_SUCCESS);
2057: }
2059: /*@C
2060: MatGetValues - Gets a block of local values from a matrix.
2062: Not Collective; can only return values that are owned by the give process
2064: Input Parameters:
2065: + mat - the matrix
2066: . v - a logically two-dimensional array for storing the values
2067: . m - the number of rows
2068: . idxm - the global indices of the rows
2069: . n - the number of columns
2070: - idxn - the global indices of the columns
2072: Level: advanced
2074: Notes:
2075: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2076: The values, `v`, are then returned in a row-oriented format,
2077: analogous to that used by default in `MatSetValues()`.
2079: `MatGetValues()` uses 0-based row and column numbers in
2080: Fortran as well as in C.
2082: `MatGetValues()` requires that the matrix has been assembled
2083: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2084: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2085: without intermediate matrix assembly.
2087: Negative row or column indices will be ignored and those locations in `v` will be
2088: left unchanged.
2090: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2091: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2092: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2094: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2095: @*/
2096: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2097: {
2098: PetscFunctionBegin;
2101: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2102: PetscAssertPointer(idxm, 3);
2103: PetscAssertPointer(idxn, 5);
2104: PetscAssertPointer(v, 6);
2105: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2106: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2107: MatCheckPreallocated(mat, 1);
2109: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2110: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2111: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2112: PetscFunctionReturn(PETSC_SUCCESS);
2113: }
2115: /*@C
2116: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2117: defined previously by `MatSetLocalToGlobalMapping()`
2119: Not Collective
2121: Input Parameters:
2122: + mat - the matrix
2123: . nrow - number of rows
2124: . irow - the row local indices
2125: . ncol - number of columns
2126: - icol - the column local indices
2128: Output Parameter:
2129: . y - a logically two-dimensional array of values
2131: Level: advanced
2133: Notes:
2134: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2136: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2137: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2138: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2139: with `MatSetLocalToGlobalMapping()`.
2141: Developer Note:
2142: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2143: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2145: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2146: `MatSetValuesLocal()`, `MatGetValues()`
2147: @*/
2148: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2149: {
2150: PetscFunctionBeginHot;
2153: MatCheckPreallocated(mat, 1);
2154: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2155: PetscAssertPointer(irow, 3);
2156: PetscAssertPointer(icol, 5);
2157: if (PetscDefined(USE_DEBUG)) {
2158: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2159: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2160: }
2161: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2162: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2163: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2164: else {
2165: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2166: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2167: irowm = buf;
2168: icolm = buf + nrow;
2169: } else {
2170: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2171: irowm = bufr;
2172: icolm = bufc;
2173: }
2174: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2175: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2176: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2177: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2178: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2179: PetscCall(PetscFree2(bufr, bufc));
2180: }
2181: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2182: PetscFunctionReturn(PETSC_SUCCESS);
2183: }
2185: /*@
2186: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2187: the same size. Currently, this can only be called once and creates the given matrix.
2189: Not Collective
2191: Input Parameters:
2192: + mat - the matrix
2193: . nb - the number of blocks
2194: . bs - the number of rows (and columns) in each block
2195: . rows - a concatenation of the rows for each block
2196: - v - a concatenation of logically two-dimensional arrays of values
2198: Level: advanced
2200: Notes:
2201: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2203: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2205: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2206: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2207: @*/
2208: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2209: {
2210: PetscFunctionBegin;
2213: PetscAssertPointer(rows, 4);
2214: PetscAssertPointer(v, 5);
2215: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2217: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2218: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2219: else {
2220: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2221: }
2222: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2223: PetscFunctionReturn(PETSC_SUCCESS);
2224: }
2226: /*@
2227: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2228: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2229: using a local (per-processor) numbering.
2231: Not Collective
2233: Input Parameters:
2234: + x - the matrix
2235: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2236: - cmapping - column mapping
2238: Level: intermediate
2240: Note:
2241: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2243: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2244: @*/
2245: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2246: {
2247: PetscFunctionBegin;
2252: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2253: else {
2254: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2255: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2256: }
2257: PetscFunctionReturn(PETSC_SUCCESS);
2258: }
2260: /*@
2261: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2263: Not Collective
2265: Input Parameter:
2266: . A - the matrix
2268: Output Parameters:
2269: + rmapping - row mapping
2270: - cmapping - column mapping
2272: Level: advanced
2274: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2275: @*/
2276: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2277: {
2278: PetscFunctionBegin;
2281: if (rmapping) {
2282: PetscAssertPointer(rmapping, 2);
2283: *rmapping = A->rmap->mapping;
2284: }
2285: if (cmapping) {
2286: PetscAssertPointer(cmapping, 3);
2287: *cmapping = A->cmap->mapping;
2288: }
2289: PetscFunctionReturn(PETSC_SUCCESS);
2290: }
2292: /*@
2293: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2295: Logically Collective
2297: Input Parameters:
2298: + A - the matrix
2299: . rmap - row layout
2300: - cmap - column layout
2302: Level: advanced
2304: Note:
2305: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2307: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2308: @*/
2309: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2310: {
2311: PetscFunctionBegin;
2313: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2314: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2315: PetscFunctionReturn(PETSC_SUCCESS);
2316: }
2318: /*@
2319: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2321: Not Collective
2323: Input Parameter:
2324: . A - the matrix
2326: Output Parameters:
2327: + rmap - row layout
2328: - cmap - column layout
2330: Level: advanced
2332: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2333: @*/
2334: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2335: {
2336: PetscFunctionBegin;
2339: if (rmap) {
2340: PetscAssertPointer(rmap, 2);
2341: *rmap = A->rmap;
2342: }
2343: if (cmap) {
2344: PetscAssertPointer(cmap, 3);
2345: *cmap = A->cmap;
2346: }
2347: PetscFunctionReturn(PETSC_SUCCESS);
2348: }
2350: /*@C
2351: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2352: using a local numbering of the rows and columns.
2354: Not Collective
2356: Input Parameters:
2357: + mat - the matrix
2358: . nrow - number of rows
2359: . irow - the row local indices
2360: . ncol - number of columns
2361: . icol - the column local indices
2362: . y - a logically two-dimensional array of values
2363: - addv - either `INSERT_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2365: Level: intermediate
2367: Notes:
2368: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2370: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2371: options cannot be mixed without intervening calls to the assembly
2372: routines.
2374: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2375: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2377: Developer Note:
2378: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2379: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2381: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2382: `MatGetValuesLocal()`
2383: @*/
2384: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2385: {
2386: PetscFunctionBeginHot;
2389: MatCheckPreallocated(mat, 1);
2390: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2391: PetscAssertPointer(irow, 3);
2392: PetscAssertPointer(icol, 5);
2393: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2394: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2395: if (PetscDefined(USE_DEBUG)) {
2396: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2397: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2398: }
2400: if (mat->assembled) {
2401: mat->was_assembled = PETSC_TRUE;
2402: mat->assembled = PETSC_FALSE;
2403: }
2404: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2405: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2406: else {
2407: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2408: const PetscInt *irowm, *icolm;
2410: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2411: bufr = buf;
2412: bufc = buf + nrow;
2413: irowm = bufr;
2414: icolm = bufc;
2415: } else {
2416: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2417: irowm = bufr;
2418: icolm = bufc;
2419: }
2420: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2421: else irowm = irow;
2422: if (mat->cmap->mapping) {
2423: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2424: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2425: } else icolm = irowm;
2426: } else icolm = icol;
2427: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2428: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2429: }
2430: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2431: PetscFunctionReturn(PETSC_SUCCESS);
2432: }
2434: /*@C
2435: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2436: using a local ordering of the nodes a block at a time.
2438: Not Collective
2440: Input Parameters:
2441: + mat - the matrix
2442: . nrow - number of rows
2443: . irow - the row local indices
2444: . ncol - number of columns
2445: . icol - the column local indices
2446: . y - a logically two-dimensional array of values
2447: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2449: Level: intermediate
2451: Notes:
2452: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2453: before using this routineBefore calling `MatSetValuesLocal()`, the user must first set the
2455: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2456: options cannot be mixed without intervening calls to the assembly
2457: routines.
2459: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2460: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2462: Developer Note:
2463: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2464: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2466: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2467: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2468: @*/
2469: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2470: {
2471: PetscFunctionBeginHot;
2474: MatCheckPreallocated(mat, 1);
2475: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2476: PetscAssertPointer(irow, 3);
2477: PetscAssertPointer(icol, 5);
2478: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2479: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2480: if (PetscDefined(USE_DEBUG)) {
2481: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2482: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2483: }
2485: if (mat->assembled) {
2486: mat->was_assembled = PETSC_TRUE;
2487: mat->assembled = PETSC_FALSE;
2488: }
2489: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2490: PetscInt irbs, rbs;
2491: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2492: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2493: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2494: }
2495: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2496: PetscInt icbs, cbs;
2497: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2498: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2499: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2500: }
2501: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2502: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2503: else {
2504: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2505: const PetscInt *irowm, *icolm;
2507: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2508: bufr = buf;
2509: bufc = buf + nrow;
2510: irowm = bufr;
2511: icolm = bufc;
2512: } else {
2513: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2514: irowm = bufr;
2515: icolm = bufc;
2516: }
2517: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2518: else irowm = irow;
2519: if (mat->cmap->mapping) {
2520: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2521: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2522: } else icolm = irowm;
2523: } else icolm = icol;
2524: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2525: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2526: }
2527: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2528: PetscFunctionReturn(PETSC_SUCCESS);
2529: }
2531: /*@
2532: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$. Where `D` is defined by the inode or block structure of the diagonal
2534: Collective
2536: Input Parameters:
2537: + mat - the matrix
2538: - x - the vector to be multiplied
2540: Output Parameter:
2541: . y - the result
2543: Level: developer
2545: Note:
2546: The vectors `x` and `y` cannot be the same. I.e., one cannot
2547: call `MatMultDiagonalBlock`(A,y,y).
2549: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2550: @*/
2551: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2552: {
2553: PetscFunctionBegin;
2559: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2560: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2561: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2562: MatCheckPreallocated(mat, 1);
2564: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2565: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2566: PetscFunctionReturn(PETSC_SUCCESS);
2567: }
2569: /*@
2570: MatMult - Computes the matrix-vector product, $y = Ax$.
2572: Neighbor-wise Collective
2574: Input Parameters:
2575: + mat - the matrix
2576: - x - the vector to be multiplied
2578: Output Parameter:
2579: . y - the result
2581: Level: beginner
2583: Note:
2584: The vectors `x` and `y` cannot be the same. I.e., one cannot
2585: call `MatMult`(A,y,y).
2587: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2588: @*/
2589: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2590: {
2591: PetscFunctionBegin;
2595: VecCheckAssembled(x);
2597: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2598: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2599: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2600: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2601: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2602: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2603: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2604: PetscCall(VecSetErrorIfLocked(y, 3));
2605: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2606: MatCheckPreallocated(mat, 1);
2608: PetscCall(VecLockReadPush(x));
2609: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2610: PetscUseTypeMethod(mat, mult, x, y);
2611: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2612: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2613: PetscCall(VecLockReadPop(x));
2614: PetscFunctionReturn(PETSC_SUCCESS);
2615: }
2617: /*@
2618: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2620: Neighbor-wise Collective
2622: Input Parameters:
2623: + mat - the matrix
2624: - x - the vector to be multiplied
2626: Output Parameter:
2627: . y - the result
2629: Level: beginner
2631: Notes:
2632: The vectors `x` and `y` cannot be the same. I.e., one cannot
2633: call `MatMultTranspose`(A,y,y).
2635: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
2636: use `MatMultHermitianTranspose()`
2638: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2639: @*/
2640: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2641: {
2642: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2644: PetscFunctionBegin;
2648: VecCheckAssembled(x);
2651: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2652: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2653: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2654: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2655: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2656: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2657: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2658: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2659: MatCheckPreallocated(mat, 1);
2661: if (!mat->ops->multtranspose) {
2662: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2663: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2664: } else op = mat->ops->multtranspose;
2665: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2666: PetscCall(VecLockReadPush(x));
2667: PetscCall((*op)(mat, x, y));
2668: PetscCall(VecLockReadPop(x));
2669: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2670: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2671: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2672: PetscFunctionReturn(PETSC_SUCCESS);
2673: }
2675: /*@
2676: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2678: Neighbor-wise Collective
2680: Input Parameters:
2681: + mat - the matrix
2682: - x - the vector to be multiplied
2684: Output Parameter:
2685: . y - the result
2687: Level: beginner
2689: Notes:
2690: The vectors `x` and `y` cannot be the same. I.e., one cannot
2691: call `MatMultHermitianTranspose`(A,y,y).
2693: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2695: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2697: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2698: @*/
2699: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2700: {
2701: PetscFunctionBegin;
2707: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2708: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2709: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2710: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2711: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2712: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2713: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2714: MatCheckPreallocated(mat, 1);
2716: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2717: #if defined(PETSC_USE_COMPLEX)
2718: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2719: PetscCall(VecLockReadPush(x));
2720: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2721: else PetscUseTypeMethod(mat, mult, x, y);
2722: PetscCall(VecLockReadPop(x));
2723: } else {
2724: Vec w;
2725: PetscCall(VecDuplicate(x, &w));
2726: PetscCall(VecCopy(x, w));
2727: PetscCall(VecConjugate(w));
2728: PetscCall(MatMultTranspose(mat, w, y));
2729: PetscCall(VecDestroy(&w));
2730: PetscCall(VecConjugate(y));
2731: }
2732: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2733: #else
2734: PetscCall(MatMultTranspose(mat, x, y));
2735: #endif
2736: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2737: PetscFunctionReturn(PETSC_SUCCESS);
2738: }
2740: /*@
2741: MatMultAdd - Computes $v3 = v2 + A * v1$.
2743: Neighbor-wise Collective
2745: Input Parameters:
2746: + mat - the matrix
2747: . v1 - the vector to be multiplied by `mat`
2748: - v2 - the vector to be added to the result
2750: Output Parameter:
2751: . v3 - the result
2753: Level: beginner
2755: Note:
2756: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2757: call `MatMultAdd`(A,v1,v2,v1).
2759: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2760: @*/
2761: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2762: {
2763: PetscFunctionBegin;
2770: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2771: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2772: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2773: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2774: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2775: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2776: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2777: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2778: MatCheckPreallocated(mat, 1);
2780: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2781: PetscCall(VecLockReadPush(v1));
2782: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2783: PetscCall(VecLockReadPop(v1));
2784: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2785: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2786: PetscFunctionReturn(PETSC_SUCCESS);
2787: }
2789: /*@
2790: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2792: Neighbor-wise Collective
2794: Input Parameters:
2795: + mat - the matrix
2796: . v1 - the vector to be multiplied by the transpose of the matrix
2797: - v2 - the vector to be added to the result
2799: Output Parameter:
2800: . v3 - the result
2802: Level: beginner
2804: Note:
2805: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2806: call `MatMultTransposeAdd`(A,v1,v2,v1).
2808: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2809: @*/
2810: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2811: {
2812: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2814: PetscFunctionBegin;
2821: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2822: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2823: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2824: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2825: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2826: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2827: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2828: MatCheckPreallocated(mat, 1);
2830: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2831: PetscCall(VecLockReadPush(v1));
2832: PetscCall((*op)(mat, v1, v2, v3));
2833: PetscCall(VecLockReadPop(v1));
2834: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2835: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2836: PetscFunctionReturn(PETSC_SUCCESS);
2837: }
2839: /*@
2840: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2842: Neighbor-wise Collective
2844: Input Parameters:
2845: + mat - the matrix
2846: . v1 - the vector to be multiplied by the Hermitian transpose
2847: - v2 - the vector to be added to the result
2849: Output Parameter:
2850: . v3 - the result
2852: Level: beginner
2854: Note:
2855: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2856: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2858: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2859: @*/
2860: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2861: {
2862: PetscFunctionBegin;
2869: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2870: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2871: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2872: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2873: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2874: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2875: MatCheckPreallocated(mat, 1);
2877: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2878: PetscCall(VecLockReadPush(v1));
2879: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2880: else {
2881: Vec w, z;
2882: PetscCall(VecDuplicate(v1, &w));
2883: PetscCall(VecCopy(v1, w));
2884: PetscCall(VecConjugate(w));
2885: PetscCall(VecDuplicate(v3, &z));
2886: PetscCall(MatMultTranspose(mat, w, z));
2887: PetscCall(VecDestroy(&w));
2888: PetscCall(VecConjugate(z));
2889: if (v2 != v3) {
2890: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2891: } else {
2892: PetscCall(VecAXPY(v3, 1.0, z));
2893: }
2894: PetscCall(VecDestroy(&z));
2895: }
2896: PetscCall(VecLockReadPop(v1));
2897: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2898: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2899: PetscFunctionReturn(PETSC_SUCCESS);
2900: }
2902: /*@C
2903: MatGetFactorType - gets the type of factorization a matrix is
2905: Not Collective
2907: Input Parameter:
2908: . mat - the matrix
2910: Output Parameter:
2911: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC,MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2913: Level: intermediate
2915: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2916: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2917: @*/
2918: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2919: {
2920: PetscFunctionBegin;
2923: PetscAssertPointer(t, 2);
2924: *t = mat->factortype;
2925: PetscFunctionReturn(PETSC_SUCCESS);
2926: }
2928: /*@C
2929: MatSetFactorType - sets the type of factorization a matrix is
2931: Logically Collective
2933: Input Parameters:
2934: + mat - the matrix
2935: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC,MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2937: Level: intermediate
2939: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2940: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2941: @*/
2942: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2943: {
2944: PetscFunctionBegin;
2947: mat->factortype = t;
2948: PetscFunctionReturn(PETSC_SUCCESS);
2949: }
2951: /*@C
2952: MatGetInfo - Returns information about matrix storage (number of
2953: nonzeros, memory, etc.).
2955: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
2957: Input Parameters:
2958: + mat - the matrix
2959: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
2961: Output Parameter:
2962: . info - matrix information context
2964: Options Database Key:
2965: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
2967: Notes:
2968: The `MatInfo` context contains a variety of matrix data, including
2969: number of nonzeros allocated and used, number of mallocs during
2970: matrix assembly, etc. Additional information for factored matrices
2971: is provided (such as the fill ratio, number of mallocs during
2972: factorization, etc.).
2974: Example:
2975: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2976: data within the MatInfo context. For example,
2977: .vb
2978: MatInfo info;
2979: Mat A;
2980: double mal, nz_a, nz_u;
2982: MatGetInfo(A, MAT_LOCAL, &info);
2983: mal = info.mallocs;
2984: nz_a = info.nz_allocated;
2985: .ve
2987: Fortran users should declare info as a double precision
2988: array of dimension `MAT_INFO_SIZE`, and then extract the parameters
2989: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
2990: a complete list of parameter names.
2991: .vb
2992: double precision info(MAT_INFO_SIZE)
2993: double precision mal, nz_a
2994: Mat A
2995: integer ierr
2997: call MatGetInfo(A, MAT_LOCAL, info, ierr)
2998: mal = info(MAT_INFO_MALLOCS)
2999: nz_a = info(MAT_INFO_NZ_ALLOCATED)
3000: .ve
3002: Level: intermediate
3004: Developer Note:
3005: The Fortran interface is not autogenerated as the
3006: interface definition cannot be generated correctly [due to `MatInfo` argument]
3008: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3009: @*/
3010: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3011: {
3012: PetscFunctionBegin;
3015: PetscAssertPointer(info, 3);
3016: MatCheckPreallocated(mat, 1);
3017: PetscUseTypeMethod(mat, getinfo, flag, info);
3018: PetscFunctionReturn(PETSC_SUCCESS);
3019: }
3021: /*
3022: This is used by external packages where it is not easy to get the info from the actual
3023: matrix factorization.
3024: */
3025: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3026: {
3027: PetscFunctionBegin;
3028: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3029: PetscFunctionReturn(PETSC_SUCCESS);
3030: }
3032: /*@C
3033: MatLUFactor - Performs in-place LU factorization of matrix.
3035: Collective
3037: Input Parameters:
3038: + mat - the matrix
3039: . row - row permutation
3040: . col - column permutation
3041: - info - options for factorization, includes
3042: .vb
3043: fill - expected fill as ratio of original fill.
3044: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3045: Run with the option -info to determine an optimal value to use
3046: .ve
3048: Level: developer
3050: Notes:
3051: Most users should employ the `KSP` interface for linear solvers
3052: instead of working directly with matrix algebra routines such as this.
3053: See, e.g., `KSPCreate()`.
3055: This changes the state of the matrix to a factored matrix; it cannot be used
3056: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3058: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3059: when not using `KSP`.
3061: Developer Note:
3062: The Fortran interface is not autogenerated as the
3063: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3065: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3066: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3067: @*/
3068: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3069: {
3070: MatFactorInfo tinfo;
3072: PetscFunctionBegin;
3076: if (info) PetscAssertPointer(info, 4);
3078: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3079: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3080: MatCheckPreallocated(mat, 1);
3081: if (!info) {
3082: PetscCall(MatFactorInfoInitialize(&tinfo));
3083: info = &tinfo;
3084: }
3086: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3087: PetscUseTypeMethod(mat, lufactor, row, col, info);
3088: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3089: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3090: PetscFunctionReturn(PETSC_SUCCESS);
3091: }
3093: /*@C
3094: MatILUFactor - Performs in-place ILU factorization of matrix.
3096: Collective
3098: Input Parameters:
3099: + mat - the matrix
3100: . row - row permutation
3101: . col - column permutation
3102: - info - structure containing
3103: .vb
3104: levels - number of levels of fill.
3105: expected fill - as ratio of original fill.
3106: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3107: missing diagonal entries)
3108: .ve
3110: Level: developer
3112: Notes:
3113: Most users should employ the `KSP` interface for linear solvers
3114: instead of working directly with matrix algebra routines such as this.
3115: See, e.g., `KSPCreate()`.
3117: Probably really in-place only when level of fill is zero, otherwise allocates
3118: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3119: when not using `KSP`.
3121: Developer Note:
3122: The Fortran interface is not autogenerated as the
3123: interface definition cannot be generated correctly [due to MatFactorInfo]
3125: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3126: @*/
3127: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3128: {
3129: PetscFunctionBegin;
3133: PetscAssertPointer(info, 4);
3135: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3136: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3137: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3138: MatCheckPreallocated(mat, 1);
3140: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3141: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3142: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3143: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3144: PetscFunctionReturn(PETSC_SUCCESS);
3145: }
3147: /*@C
3148: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3149: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3151: Collective
3153: Input Parameters:
3154: + fact - the factor matrix obtained with `MatGetFactor()`
3155: . mat - the matrix
3156: . row - the row permutation
3157: . col - the column permutation
3158: - info - options for factorization, includes
3159: .vb
3160: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3161: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3162: .ve
3164: Level: developer
3166: Notes:
3167: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3169: Most users should employ the simplified `KSP` interface for linear solvers
3170: instead of working directly with matrix algebra routines such as this.
3171: See, e.g., `KSPCreate()`.
3173: Developer Note:
3174: The Fortran interface is not autogenerated as the
3175: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3177: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3178: @*/
3179: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3180: {
3181: MatFactorInfo tinfo;
3183: PetscFunctionBegin;
3188: if (info) PetscAssertPointer(info, 5);
3191: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3192: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3193: MatCheckPreallocated(mat, 2);
3194: if (!info) {
3195: PetscCall(MatFactorInfoInitialize(&tinfo));
3196: info = &tinfo;
3197: }
3199: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3200: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3201: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3202: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3203: PetscFunctionReturn(PETSC_SUCCESS);
3204: }
3206: /*@C
3207: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3208: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3210: Collective
3212: Input Parameters:
3213: + fact - the factor matrix obtained with `MatGetFactor()`
3214: . mat - the matrix
3215: - info - options for factorization
3217: Level: developer
3219: Notes:
3220: See `MatLUFactor()` for in-place factorization. See
3221: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3223: Most users should employ the `KSP` interface for linear solvers
3224: instead of working directly with matrix algebra routines such as this.
3225: See, e.g., `KSPCreate()`.
3227: Developer Note:
3228: The Fortran interface is not autogenerated as the
3229: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3231: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3232: @*/
3233: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3234: {
3235: MatFactorInfo tinfo;
3237: PetscFunctionBegin;
3242: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3243: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3244: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3246: MatCheckPreallocated(mat, 2);
3247: if (!info) {
3248: PetscCall(MatFactorInfoInitialize(&tinfo));
3249: info = &tinfo;
3250: }
3252: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3253: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3254: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3255: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3256: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3257: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3258: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3259: PetscFunctionReturn(PETSC_SUCCESS);
3260: }
3262: /*@C
3263: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3264: symmetric matrix.
3266: Collective
3268: Input Parameters:
3269: + mat - the matrix
3270: . perm - row and column permutations
3271: - info - expected fill as ratio of original fill
3273: Level: developer
3275: Notes:
3276: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3277: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3279: Most users should employ the `KSP` interface for linear solvers
3280: instead of working directly with matrix algebra routines such as this.
3281: See, e.g., `KSPCreate()`.
3283: Developer Note:
3284: The Fortran interface is not autogenerated as the
3285: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3287: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3288: `MatGetOrdering()`
3289: @*/
3290: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3291: {
3292: MatFactorInfo tinfo;
3294: PetscFunctionBegin;
3297: if (info) PetscAssertPointer(info, 3);
3299: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3300: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3301: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3302: MatCheckPreallocated(mat, 1);
3303: if (!info) {
3304: PetscCall(MatFactorInfoInitialize(&tinfo));
3305: info = &tinfo;
3306: }
3308: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3309: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3310: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3311: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3312: PetscFunctionReturn(PETSC_SUCCESS);
3313: }
3315: /*@C
3316: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3317: of a symmetric matrix.
3319: Collective
3321: Input Parameters:
3322: + fact - the factor matrix obtained with `MatGetFactor()`
3323: . mat - the matrix
3324: . perm - row and column permutations
3325: - info - options for factorization, includes
3326: .vb
3327: fill - expected fill as ratio of original fill.
3328: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3329: Run with the option -info to determine an optimal value to use
3330: .ve
3332: Level: developer
3334: Notes:
3335: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3336: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3338: Most users should employ the `KSP` interface for linear solvers
3339: instead of working directly with matrix algebra routines such as this.
3340: See, e.g., `KSPCreate()`.
3342: Developer Note:
3343: The Fortran interface is not autogenerated as the
3344: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3346: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3347: `MatGetOrdering()`
3348: @*/
3349: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3350: {
3351: MatFactorInfo tinfo;
3353: PetscFunctionBegin;
3357: if (info) PetscAssertPointer(info, 4);
3360: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3361: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3362: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3363: MatCheckPreallocated(mat, 2);
3364: if (!info) {
3365: PetscCall(MatFactorInfoInitialize(&tinfo));
3366: info = &tinfo;
3367: }
3369: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3370: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3371: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3372: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3373: PetscFunctionReturn(PETSC_SUCCESS);
3374: }
3376: /*@C
3377: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3378: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3379: `MatCholeskyFactorSymbolic()`.
3381: Collective
3383: Input Parameters:
3384: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3385: . mat - the initial matrix that is to be factored
3386: - info - options for factorization
3388: Level: developer
3390: Note:
3391: Most users should employ the `KSP` interface for linear solvers
3392: instead of working directly with matrix algebra routines such as this.
3393: See, e.g., `KSPCreate()`.
3395: Developer Note:
3396: The Fortran interface is not autogenerated as the
3397: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3399: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3400: @*/
3401: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3402: {
3403: MatFactorInfo tinfo;
3405: PetscFunctionBegin;
3410: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3411: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3412: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3413: MatCheckPreallocated(mat, 2);
3414: if (!info) {
3415: PetscCall(MatFactorInfoInitialize(&tinfo));
3416: info = &tinfo;
3417: }
3419: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3420: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3421: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3422: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3423: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3424: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3425: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3426: PetscFunctionReturn(PETSC_SUCCESS);
3427: }
3429: /*@
3430: MatQRFactor - Performs in-place QR factorization of matrix.
3432: Collective
3434: Input Parameters:
3435: + mat - the matrix
3436: . col - column permutation
3437: - info - options for factorization, includes
3438: .vb
3439: fill - expected fill as ratio of original fill.
3440: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3441: Run with the option -info to determine an optimal value to use
3442: .ve
3444: Level: developer
3446: Notes:
3447: Most users should employ the `KSP` interface for linear solvers
3448: instead of working directly with matrix algebra routines such as this.
3449: See, e.g., `KSPCreate()`.
3451: This changes the state of the matrix to a factored matrix; it cannot be used
3452: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3454: Developer Note:
3455: The Fortran interface is not autogenerated as the
3456: interface definition cannot be generated correctly [due to MatFactorInfo]
3458: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3459: `MatSetUnfactored()`
3460: @*/
3461: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3462: {
3463: PetscFunctionBegin;
3466: if (info) PetscAssertPointer(info, 3);
3468: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3469: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3470: MatCheckPreallocated(mat, 1);
3471: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3472: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3473: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3474: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3475: PetscFunctionReturn(PETSC_SUCCESS);
3476: }
3478: /*@
3479: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3480: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3482: Collective
3484: Input Parameters:
3485: + fact - the factor matrix obtained with `MatGetFactor()`
3486: . mat - the matrix
3487: . col - column permutation
3488: - info - options for factorization, includes
3489: .vb
3490: fill - expected fill as ratio of original fill.
3491: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3492: Run with the option -info to determine an optimal value to use
3493: .ve
3495: Level: developer
3497: Note:
3498: Most users should employ the `KSP` interface for linear solvers
3499: instead of working directly with matrix algebra routines such as this.
3500: See, e.g., `KSPCreate()`.
3502: Developer Note:
3503: The Fortran interface is not autogenerated as the
3504: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3506: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3507: @*/
3508: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3509: {
3510: MatFactorInfo tinfo;
3512: PetscFunctionBegin;
3516: if (info) PetscAssertPointer(info, 4);
3519: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3520: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3521: MatCheckPreallocated(mat, 2);
3522: if (!info) {
3523: PetscCall(MatFactorInfoInitialize(&tinfo));
3524: info = &tinfo;
3525: }
3527: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3528: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3529: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3530: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3531: PetscFunctionReturn(PETSC_SUCCESS);
3532: }
3534: /*@
3535: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3536: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3538: Collective
3540: Input Parameters:
3541: + fact - the factor matrix obtained with `MatGetFactor()`
3542: . mat - the matrix
3543: - info - options for factorization
3545: Level: developer
3547: Notes:
3548: See `MatQRFactor()` for in-place factorization.
3550: Most users should employ the `KSP` interface for linear solvers
3551: instead of working directly with matrix algebra routines such as this.
3552: See, e.g., `KSPCreate()`.
3554: Developer Note:
3555: The Fortran interface is not autogenerated as the
3556: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3558: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3559: @*/
3560: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3561: {
3562: MatFactorInfo tinfo;
3564: PetscFunctionBegin;
3569: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3570: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3571: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3573: MatCheckPreallocated(mat, 2);
3574: if (!info) {
3575: PetscCall(MatFactorInfoInitialize(&tinfo));
3576: info = &tinfo;
3577: }
3579: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3580: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3581: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3582: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3583: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3584: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3585: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3586: PetscFunctionReturn(PETSC_SUCCESS);
3587: }
3589: /*@
3590: MatSolve - Solves $A x = b$, given a factored matrix.
3592: Neighbor-wise Collective
3594: Input Parameters:
3595: + mat - the factored matrix
3596: - b - the right-hand-side vector
3598: Output Parameter:
3599: . x - the result vector
3601: Level: developer
3603: Notes:
3604: The vectors `b` and `x` cannot be the same. I.e., one cannot
3605: call `MatSolve`(A,x,x).
3607: Most users should employ the `KSP` interface for linear solvers
3608: instead of working directly with matrix algebra routines such as this.
3609: See, e.g., `KSPCreate()`.
3611: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3612: @*/
3613: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3614: {
3615: PetscFunctionBegin;
3620: PetscCheckSameComm(mat, 1, b, 2);
3621: PetscCheckSameComm(mat, 1, x, 3);
3622: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3623: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3624: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3625: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3626: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3627: MatCheckPreallocated(mat, 1);
3629: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3630: if (mat->factorerrortype) {
3631: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3632: PetscCall(VecSetInf(x));
3633: } else PetscUseTypeMethod(mat, solve, b, x);
3634: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3635: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3636: PetscFunctionReturn(PETSC_SUCCESS);
3637: }
3639: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3640: {
3641: Vec b, x;
3642: PetscInt N, i;
3643: PetscErrorCode (*f)(Mat, Vec, Vec);
3644: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3646: PetscFunctionBegin;
3647: if (A->factorerrortype) {
3648: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3649: PetscCall(MatSetInf(X));
3650: PetscFunctionReturn(PETSC_SUCCESS);
3651: }
3652: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3653: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3654: PetscCall(MatBoundToCPU(A, &Abound));
3655: if (!Abound) {
3656: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3657: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3658: }
3659: #if PetscDefined(HAVE_CUDA)
3660: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3661: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3662: #elif PetscDefined(HAVE_HIP)
3663: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3664: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3665: #endif
3666: PetscCall(MatGetSize(B, NULL, &N));
3667: for (i = 0; i < N; i++) {
3668: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3669: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3670: PetscCall((*f)(A, b, x));
3671: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3672: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3673: }
3674: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3675: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3676: PetscFunctionReturn(PETSC_SUCCESS);
3677: }
3679: /*@
3680: MatMatSolve - Solves $A X = B$, given a factored matrix.
3682: Neighbor-wise Collective
3684: Input Parameters:
3685: + A - the factored matrix
3686: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3688: Output Parameter:
3689: . X - the result matrix (dense matrix)
3691: Level: developer
3693: Note:
3694: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3695: otherwise, `B` and `X` cannot be the same.
3697: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3698: @*/
3699: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3700: {
3701: PetscFunctionBegin;
3706: PetscCheckSameComm(A, 1, B, 2);
3707: PetscCheckSameComm(A, 1, X, 3);
3708: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3709: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3710: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3711: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3712: MatCheckPreallocated(A, 1);
3714: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3715: if (!A->ops->matsolve) {
3716: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3717: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3718: } else PetscUseTypeMethod(A, matsolve, B, X);
3719: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3720: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3721: PetscFunctionReturn(PETSC_SUCCESS);
3722: }
3724: /*@
3725: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3727: Neighbor-wise Collective
3729: Input Parameters:
3730: + A - the factored matrix
3731: - B - the right-hand-side matrix (`MATDENSE` matrix)
3733: Output Parameter:
3734: . X - the result matrix (dense matrix)
3736: Level: developer
3738: Note:
3739: The matrices `B` and `X` cannot be the same. I.e., one cannot
3740: call `MatMatSolveTranspose`(A,X,X).
3742: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3743: @*/
3744: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3745: {
3746: PetscFunctionBegin;
3751: PetscCheckSameComm(A, 1, B, 2);
3752: PetscCheckSameComm(A, 1, X, 3);
3753: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3754: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3755: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3756: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3757: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3758: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3759: MatCheckPreallocated(A, 1);
3761: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3762: if (!A->ops->matsolvetranspose) {
3763: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3764: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3765: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3766: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3767: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3768: PetscFunctionReturn(PETSC_SUCCESS);
3769: }
3771: /*@
3772: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3774: Neighbor-wise Collective
3776: Input Parameters:
3777: + A - the factored matrix
3778: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3780: Output Parameter:
3781: . X - the result matrix (dense matrix)
3783: Level: developer
3785: Note:
3786: For MUMPS, it only supports centralized sparse compressed column format on the host processor for right-hand side matrix. User must create `Bt` in sparse compressed row
3787: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3789: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3790: @*/
3791: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3792: {
3793: PetscFunctionBegin;
3798: PetscCheckSameComm(A, 1, Bt, 2);
3799: PetscCheckSameComm(A, 1, X, 3);
3801: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3802: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3803: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3804: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3805: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3806: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3807: MatCheckPreallocated(A, 1);
3809: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3810: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3811: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3812: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3813: PetscFunctionReturn(PETSC_SUCCESS);
3814: }
3816: /*@
3817: MatForwardSolve - Solves $ L x = b $, given a factored matrix, $A = LU $, or
3818: $U^T*D^(1/2) x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
3820: Neighbor-wise Collective
3822: Input Parameters:
3823: + mat - the factored matrix
3824: - b - the right-hand-side vector
3826: Output Parameter:
3827: . x - the result vector
3829: Level: developer
3831: Notes:
3832: `MatSolve()` should be used for most applications, as it performs
3833: a forward solve followed by a backward solve.
3835: The vectors `b` and `x` cannot be the same, i.e., one cannot
3836: call `MatForwardSolve`(A,x,x).
3838: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3839: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
3840: `MatForwardSolve()` solves $U^T*D y = b$, and
3841: `MatBackwardSolve()` solves $U x = y$.
3842: Thus they do not provide a symmetric preconditioner.
3844: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3845: @*/
3846: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3847: {
3848: PetscFunctionBegin;
3853: PetscCheckSameComm(mat, 1, b, 2);
3854: PetscCheckSameComm(mat, 1, x, 3);
3855: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3856: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3857: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3858: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3859: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3860: MatCheckPreallocated(mat, 1);
3862: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3863: PetscUseTypeMethod(mat, forwardsolve, b, x);
3864: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3865: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3866: PetscFunctionReturn(PETSC_SUCCESS);
3867: }
3869: /*@
3870: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$.
3871: $D^(1/2) U x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
3873: Neighbor-wise Collective
3875: Input Parameters:
3876: + mat - the factored matrix
3877: - b - the right-hand-side vector
3879: Output Parameter:
3880: . x - the result vector
3882: Level: developer
3884: Notes:
3885: `MatSolve()` should be used for most applications, as it performs
3886: a forward solve followed by a backward solve.
3888: The vectors `b` and `x` cannot be the same. I.e., one cannot
3889: call `MatBackwardSolve`(A,x,x).
3891: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3892: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
3893: `MatForwardSolve()` solves $U^T*D y = b$, and
3894: `MatBackwardSolve()` solves $U x = y$.
3895: Thus they do not provide a symmetric preconditioner.
3897: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3898: @*/
3899: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3900: {
3901: PetscFunctionBegin;
3906: PetscCheckSameComm(mat, 1, b, 2);
3907: PetscCheckSameComm(mat, 1, x, 3);
3908: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3909: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3910: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3911: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3912: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3913: MatCheckPreallocated(mat, 1);
3915: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3916: PetscUseTypeMethod(mat, backwardsolve, b, x);
3917: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3918: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3919: PetscFunctionReturn(PETSC_SUCCESS);
3920: }
3922: /*@
3923: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
3925: Neighbor-wise Collective
3927: Input Parameters:
3928: + mat - the factored matrix
3929: . b - the right-hand-side vector
3930: - y - the vector to be added to
3932: Output Parameter:
3933: . x - the result vector
3935: Level: developer
3937: Note:
3938: The vectors `b` and `x` cannot be the same. I.e., one cannot
3939: call `MatSolveAdd`(A,x,y,x).
3941: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3942: @*/
3943: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3944: {
3945: PetscScalar one = 1.0;
3946: Vec tmp;
3948: PetscFunctionBegin;
3954: PetscCheckSameComm(mat, 1, b, 2);
3955: PetscCheckSameComm(mat, 1, y, 3);
3956: PetscCheckSameComm(mat, 1, x, 4);
3957: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3958: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3959: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3960: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3961: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3962: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
3963: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3964: MatCheckPreallocated(mat, 1);
3966: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
3967: if (mat->factorerrortype) {
3968: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3969: PetscCall(VecSetInf(x));
3970: } else if (mat->ops->solveadd) {
3971: PetscUseTypeMethod(mat, solveadd, b, y, x);
3972: } else {
3973: /* do the solve then the add manually */
3974: if (x != y) {
3975: PetscCall(MatSolve(mat, b, x));
3976: PetscCall(VecAXPY(x, one, y));
3977: } else {
3978: PetscCall(VecDuplicate(x, &tmp));
3979: PetscCall(VecCopy(x, tmp));
3980: PetscCall(MatSolve(mat, b, x));
3981: PetscCall(VecAXPY(x, one, tmp));
3982: PetscCall(VecDestroy(&tmp));
3983: }
3984: }
3985: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
3986: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3987: PetscFunctionReturn(PETSC_SUCCESS);
3988: }
3990: /*@
3991: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
3993: Neighbor-wise Collective
3995: Input Parameters:
3996: + mat - the factored matrix
3997: - b - the right-hand-side vector
3999: Output Parameter:
4000: . x - the result vector
4002: Level: developer
4004: Notes:
4005: The vectors `b` and `x` cannot be the same. I.e., one cannot
4006: call `MatSolveTranspose`(A,x,x).
4008: Most users should employ the `KSP` interface for linear solvers
4009: instead of working directly with matrix algebra routines such as this.
4010: See, e.g., `KSPCreate()`.
4012: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4013: @*/
4014: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4015: {
4016: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4018: PetscFunctionBegin;
4023: PetscCheckSameComm(mat, 1, b, 2);
4024: PetscCheckSameComm(mat, 1, x, 3);
4025: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4026: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4027: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4028: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4029: MatCheckPreallocated(mat, 1);
4030: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4031: if (mat->factorerrortype) {
4032: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4033: PetscCall(VecSetInf(x));
4034: } else {
4035: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4036: PetscCall((*f)(mat, b, x));
4037: }
4038: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4039: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4040: PetscFunctionReturn(PETSC_SUCCESS);
4041: }
4043: /*@
4044: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$
4045: factored matrix.
4047: Neighbor-wise Collective
4049: Input Parameters:
4050: + mat - the factored matrix
4051: . b - the right-hand-side vector
4052: - y - the vector to be added to
4054: Output Parameter:
4055: . x - the result vector
4057: Level: developer
4059: Note:
4060: The vectors `b` and `x` cannot be the same. I.e., one cannot
4061: call `MatSolveTransposeAdd`(A,x,y,x).
4063: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4064: @*/
4065: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4066: {
4067: PetscScalar one = 1.0;
4068: Vec tmp;
4069: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4071: PetscFunctionBegin;
4077: PetscCheckSameComm(mat, 1, b, 2);
4078: PetscCheckSameComm(mat, 1, y, 3);
4079: PetscCheckSameComm(mat, 1, x, 4);
4080: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4081: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4082: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4083: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4084: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4085: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4086: MatCheckPreallocated(mat, 1);
4088: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4089: if (mat->factorerrortype) {
4090: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4091: PetscCall(VecSetInf(x));
4092: } else if (f) {
4093: PetscCall((*f)(mat, b, y, x));
4094: } else {
4095: /* do the solve then the add manually */
4096: if (x != y) {
4097: PetscCall(MatSolveTranspose(mat, b, x));
4098: PetscCall(VecAXPY(x, one, y));
4099: } else {
4100: PetscCall(VecDuplicate(x, &tmp));
4101: PetscCall(VecCopy(x, tmp));
4102: PetscCall(MatSolveTranspose(mat, b, x));
4103: PetscCall(VecAXPY(x, one, tmp));
4104: PetscCall(VecDestroy(&tmp));
4105: }
4106: }
4107: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4108: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4109: PetscFunctionReturn(PETSC_SUCCESS);
4110: }
4112: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4113: /*@
4114: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4116: Neighbor-wise Collective
4118: Input Parameters:
4119: + mat - the matrix
4120: . b - the right-hand side
4121: . omega - the relaxation factor
4122: . flag - flag indicating the type of SOR (see below)
4123: . shift - diagonal shift
4124: . its - the number of iterations
4125: - lits - the number of local iterations
4127: Output Parameter:
4128: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4130: SOR Flags:
4131: + `SOR_FORWARD_SWEEP` - forward SOR
4132: . `SOR_BACKWARD_SWEEP` - backward SOR
4133: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4134: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4135: . `SOR_LOCAL_BACKWARD_SWEEP` - local forward SOR
4136: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4137: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4138: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4139: upper/lower triangular part of matrix to
4140: vector (with omega)
4141: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4143: Level: developer
4145: Notes:
4146: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4147: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4148: on each processor.
4150: Application programmers will not generally use `MatSOR()` directly,
4151: but instead will employ the `KSP`/`PC` interface.
4153: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4155: Most users should employ the `KSP` interface for linear solvers
4156: instead of working directly with matrix algebra routines such as this.
4157: See, e.g., `KSPCreate()`.
4159: Vectors `x` and `b` CANNOT be the same
4161: The flags are implemented as bitwise inclusive or operations.
4162: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4163: to specify a zero initial guess for SSOR.
4165: Developer Note:
4166: We should add block SOR support for `MATAIJ` matrices with block size set to great than one and no inodes
4168: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4169: @*/
4170: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4171: {
4172: PetscFunctionBegin;
4177: PetscCheckSameComm(mat, 1, b, 2);
4178: PetscCheckSameComm(mat, 1, x, 8);
4179: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4180: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4181: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4182: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4183: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4184: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4185: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4186: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4188: MatCheckPreallocated(mat, 1);
4189: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4190: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4191: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4192: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4193: PetscFunctionReturn(PETSC_SUCCESS);
4194: }
4196: /*
4197: Default matrix copy routine.
4198: */
4199: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4200: {
4201: PetscInt i, rstart = 0, rend = 0, nz;
4202: const PetscInt *cwork;
4203: const PetscScalar *vwork;
4205: PetscFunctionBegin;
4206: if (B->assembled) PetscCall(MatZeroEntries(B));
4207: if (str == SAME_NONZERO_PATTERN) {
4208: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4209: for (i = rstart; i < rend; i++) {
4210: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4211: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4212: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4213: }
4214: } else {
4215: PetscCall(MatAYPX(B, 0.0, A, str));
4216: }
4217: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4218: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4219: PetscFunctionReturn(PETSC_SUCCESS);
4220: }
4222: /*@
4223: MatCopy - Copies a matrix to another matrix.
4225: Collective
4227: Input Parameters:
4228: + A - the matrix
4229: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4231: Output Parameter:
4232: . B - where the copy is put
4234: Level: intermediate
4236: Notes:
4237: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4239: `MatCopy()` copies the matrix entries of a matrix to another existing
4240: matrix (after first zeroing the second matrix). A related routine is
4241: `MatConvert()`, which first creates a new matrix and then copies the data.
4243: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4244: @*/
4245: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4246: {
4247: PetscInt i;
4249: PetscFunctionBegin;
4254: PetscCheckSameComm(A, 1, B, 2);
4255: MatCheckPreallocated(B, 2);
4256: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4257: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4258: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4259: A->cmap->N, B->cmap->N);
4260: MatCheckPreallocated(A, 1);
4261: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4263: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4264: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4265: else PetscCall(MatCopy_Basic(A, B, str));
4267: B->stencil.dim = A->stencil.dim;
4268: B->stencil.noc = A->stencil.noc;
4269: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4270: B->stencil.dims[i] = A->stencil.dims[i];
4271: B->stencil.starts[i] = A->stencil.starts[i];
4272: }
4274: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4275: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4276: PetscFunctionReturn(PETSC_SUCCESS);
4277: }
4279: /*@C
4280: MatConvert - Converts a matrix to another matrix, either of the same
4281: or different type.
4283: Collective
4285: Input Parameters:
4286: + mat - the matrix
4287: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4288: same type as the original matrix.
4289: - reuse - denotes if the destination matrix is to be created or reused.
4290: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
4291: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4293: Output Parameter:
4294: . M - pointer to place new matrix
4296: Level: intermediate
4298: Notes:
4299: `MatConvert()` first creates a new matrix and then copies the data from
4300: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4301: entries of one matrix to another already existing matrix context.
4303: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4304: the MPI communicator of the generated matrix is always the same as the communicator
4305: of the input matrix.
4307: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4308: @*/
4309: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4310: {
4311: PetscBool sametype, issame, flg;
4312: PetscBool3 issymmetric, ishermitian;
4313: char convname[256], mtype[256];
4314: Mat B;
4316: PetscFunctionBegin;
4319: PetscAssertPointer(M, 4);
4320: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4321: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4322: MatCheckPreallocated(mat, 1);
4324: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4325: if (flg) newtype = mtype;
4327: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4328: PetscCall(PetscStrcmp(newtype, "same", &issame));
4329: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4330: if (reuse == MAT_REUSE_MATRIX) {
4332: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4333: }
4335: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4336: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4337: PetscFunctionReturn(PETSC_SUCCESS);
4338: }
4340: /* Cache Mat options because some converters use MatHeaderReplace */
4341: issymmetric = mat->symmetric;
4342: ishermitian = mat->hermitian;
4344: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4345: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4346: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4347: } else {
4348: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4349: const char *prefix[3] = {"seq", "mpi", ""};
4350: PetscInt i;
4351: /*
4352: Order of precedence:
4353: 0) See if newtype is a superclass of the current matrix.
4354: 1) See if a specialized converter is known to the current matrix.
4355: 2) See if a specialized converter is known to the desired matrix class.
4356: 3) See if a good general converter is registered for the desired class
4357: (as of 6/27/03 only MATMPIADJ falls into this category).
4358: 4) See if a good general converter is known for the current matrix.
4359: 5) Use a really basic converter.
4360: */
4362: /* 0) See if newtype is a superclass of the current matrix.
4363: i.e mat is mpiaij and newtype is aij */
4364: for (i = 0; i < 2; i++) {
4365: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4366: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4367: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4368: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4369: if (flg) {
4370: if (reuse == MAT_INPLACE_MATRIX) {
4371: PetscCall(PetscInfo(mat, "Early return\n"));
4372: PetscFunctionReturn(PETSC_SUCCESS);
4373: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4374: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4375: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4376: PetscFunctionReturn(PETSC_SUCCESS);
4377: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4378: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4379: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4380: PetscFunctionReturn(PETSC_SUCCESS);
4381: }
4382: }
4383: }
4384: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4385: for (i = 0; i < 3; i++) {
4386: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4387: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4388: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4389: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4390: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4391: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4392: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4393: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4394: if (conv) goto foundconv;
4395: }
4397: /* 2) See if a specialized converter is known to the desired matrix class. */
4398: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4399: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4400: PetscCall(MatSetType(B, newtype));
4401: for (i = 0; i < 3; i++) {
4402: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4403: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4404: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4405: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4406: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4407: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4408: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4409: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4410: if (conv) {
4411: PetscCall(MatDestroy(&B));
4412: goto foundconv;
4413: }
4414: }
4416: /* 3) See if a good general converter is registered for the desired class */
4417: conv = B->ops->convertfrom;
4418: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4419: PetscCall(MatDestroy(&B));
4420: if (conv) goto foundconv;
4422: /* 4) See if a good general converter is known for the current matrix */
4423: if (mat->ops->convert) conv = mat->ops->convert;
4424: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4425: if (conv) goto foundconv;
4427: /* 5) Use a really basic converter. */
4428: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4429: conv = MatConvert_Basic;
4431: foundconv:
4432: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4433: PetscCall((*conv)(mat, newtype, reuse, M));
4434: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4435: /* the block sizes must be same if the mappings are copied over */
4436: (*M)->rmap->bs = mat->rmap->bs;
4437: (*M)->cmap->bs = mat->cmap->bs;
4438: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4439: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4440: (*M)->rmap->mapping = mat->rmap->mapping;
4441: (*M)->cmap->mapping = mat->cmap->mapping;
4442: }
4443: (*M)->stencil.dim = mat->stencil.dim;
4444: (*M)->stencil.noc = mat->stencil.noc;
4445: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4446: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4447: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4448: }
4449: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4450: }
4451: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4453: /* Copy Mat options */
4454: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4455: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4456: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4457: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4458: PetscFunctionReturn(PETSC_SUCCESS);
4459: }
4461: /*@C
4462: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4464: Not Collective
4466: Input Parameter:
4467: . mat - the matrix, must be a factored matrix
4469: Output Parameter:
4470: . type - the string name of the package (do not free this string)
4472: Level: intermediate
4474: Fortran Note:
4475: Pass in an empty string that is long enough and the package name will be copied into it.
4477: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4478: @*/
4479: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4480: {
4481: PetscErrorCode (*conv)(Mat, MatSolverType *);
4483: PetscFunctionBegin;
4486: PetscAssertPointer(type, 2);
4487: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4488: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4489: if (conv) PetscCall((*conv)(mat, type));
4490: else *type = MATSOLVERPETSC;
4491: PetscFunctionReturn(PETSC_SUCCESS);
4492: }
4494: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4495: struct _MatSolverTypeForSpecifcType {
4496: MatType mtype;
4497: /* no entry for MAT_FACTOR_NONE */
4498: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4499: MatSolverTypeForSpecifcType next;
4500: };
4502: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4503: struct _MatSolverTypeHolder {
4504: char *name;
4505: MatSolverTypeForSpecifcType handlers;
4506: MatSolverTypeHolder next;
4507: };
4509: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4511: /*@C
4512: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4514: Input Parameters:
4515: + package - name of the package, for example petsc or superlu
4516: . mtype - the matrix type that works with this package
4517: . ftype - the type of factorization supported by the package
4518: - createfactor - routine that will create the factored matrix ready to be used
4520: Level: developer
4522: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4523: `MatGetFactor()`
4524: @*/
4525: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4526: {
4527: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4528: PetscBool flg;
4529: MatSolverTypeForSpecifcType inext, iprev = NULL;
4531: PetscFunctionBegin;
4532: PetscCall(MatInitializePackage());
4533: if (!next) {
4534: PetscCall(PetscNew(&MatSolverTypeHolders));
4535: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4536: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4537: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4538: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4539: PetscFunctionReturn(PETSC_SUCCESS);
4540: }
4541: while (next) {
4542: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4543: if (flg) {
4544: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4545: inext = next->handlers;
4546: while (inext) {
4547: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4548: if (flg) {
4549: inext->createfactor[(int)ftype - 1] = createfactor;
4550: PetscFunctionReturn(PETSC_SUCCESS);
4551: }
4552: iprev = inext;
4553: inext = inext->next;
4554: }
4555: PetscCall(PetscNew(&iprev->next));
4556: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4557: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4558: PetscFunctionReturn(PETSC_SUCCESS);
4559: }
4560: prev = next;
4561: next = next->next;
4562: }
4563: PetscCall(PetscNew(&prev->next));
4564: PetscCall(PetscStrallocpy(package, &prev->next->name));
4565: PetscCall(PetscNew(&prev->next->handlers));
4566: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4567: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4568: PetscFunctionReturn(PETSC_SUCCESS);
4569: }
4571: /*@C
4572: MatSolverTypeGet - Gets the function that creates the factor matrix if it exist
4574: Input Parameters:
4575: + type - name of the package, for example petsc or superlu, if this is 'NULL', then the first result that satisfies the other criteria is returned
4576: . ftype - the type of factorization supported by the type
4577: - mtype - the matrix type that works with this type
4579: Output Parameters:
4580: + foundtype - `PETSC_TRUE` if the type was registered
4581: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4582: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4584: Calling sequence of `createfactor`:
4585: + A - the matrix providing the factor matrix
4586: . mtype - the `MatType` of the factor requested
4587: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4589: Level: developer
4591: Note:
4592: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4593: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4594: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4596: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4597: `MatInitializePackage()`
4598: @*/
4599: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType mtype, Mat *B))
4600: {
4601: MatSolverTypeHolder next = MatSolverTypeHolders;
4602: PetscBool flg;
4603: MatSolverTypeForSpecifcType inext;
4605: PetscFunctionBegin;
4606: if (foundtype) *foundtype = PETSC_FALSE;
4607: if (foundmtype) *foundmtype = PETSC_FALSE;
4608: if (createfactor) *createfactor = NULL;
4610: if (type) {
4611: while (next) {
4612: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4613: if (flg) {
4614: if (foundtype) *foundtype = PETSC_TRUE;
4615: inext = next->handlers;
4616: while (inext) {
4617: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4618: if (flg) {
4619: if (foundmtype) *foundmtype = PETSC_TRUE;
4620: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4621: PetscFunctionReturn(PETSC_SUCCESS);
4622: }
4623: inext = inext->next;
4624: }
4625: }
4626: next = next->next;
4627: }
4628: } else {
4629: while (next) {
4630: inext = next->handlers;
4631: while (inext) {
4632: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4633: if (flg && inext->createfactor[(int)ftype - 1]) {
4634: if (foundtype) *foundtype = PETSC_TRUE;
4635: if (foundmtype) *foundmtype = PETSC_TRUE;
4636: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4637: PetscFunctionReturn(PETSC_SUCCESS);
4638: }
4639: inext = inext->next;
4640: }
4641: next = next->next;
4642: }
4643: /* try with base classes inext->mtype */
4644: next = MatSolverTypeHolders;
4645: while (next) {
4646: inext = next->handlers;
4647: while (inext) {
4648: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4649: if (flg && inext->createfactor[(int)ftype - 1]) {
4650: if (foundtype) *foundtype = PETSC_TRUE;
4651: if (foundmtype) *foundmtype = PETSC_TRUE;
4652: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4653: PetscFunctionReturn(PETSC_SUCCESS);
4654: }
4655: inext = inext->next;
4656: }
4657: next = next->next;
4658: }
4659: }
4660: PetscFunctionReturn(PETSC_SUCCESS);
4661: }
4663: PetscErrorCode MatSolverTypeDestroy(void)
4664: {
4665: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4666: MatSolverTypeForSpecifcType inext, iprev;
4668: PetscFunctionBegin;
4669: while (next) {
4670: PetscCall(PetscFree(next->name));
4671: inext = next->handlers;
4672: while (inext) {
4673: PetscCall(PetscFree(inext->mtype));
4674: iprev = inext;
4675: inext = inext->next;
4676: PetscCall(PetscFree(iprev));
4677: }
4678: prev = next;
4679: next = next->next;
4680: PetscCall(PetscFree(prev));
4681: }
4682: MatSolverTypeHolders = NULL;
4683: PetscFunctionReturn(PETSC_SUCCESS);
4684: }
4686: /*@C
4687: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4689: Logically Collective
4691: Input Parameter:
4692: . mat - the matrix
4694: Output Parameter:
4695: . flg - `PETSC_TRUE` if uses the ordering
4697: Level: developer
4699: Note:
4700: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4701: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4703: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4704: @*/
4705: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4706: {
4707: PetscFunctionBegin;
4708: *flg = mat->canuseordering;
4709: PetscFunctionReturn(PETSC_SUCCESS);
4710: }
4712: /*@C
4713: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4715: Logically Collective
4717: Input Parameters:
4718: + mat - the matrix obtained with `MatGetFactor()`
4719: - ftype - the factorization type to be used
4721: Output Parameter:
4722: . otype - the preferred ordering type
4724: Level: developer
4726: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4727: @*/
4728: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4729: {
4730: PetscFunctionBegin;
4731: *otype = mat->preferredordering[ftype];
4732: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4733: PetscFunctionReturn(PETSC_SUCCESS);
4734: }
4736: /*@C
4737: MatGetFactor - Returns a matrix suitable to calls to MatXXFactorSymbolic,Numeric()
4739: Collective
4741: Input Parameters:
4742: + mat - the matrix
4743: . type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
4744: the other criteria is returned
4745: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4747: Output Parameter:
4748: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4750: Options Database Keys:
4751: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4752: - -mat_factor_bind_factorization <host, device> - Where to do matrix factorization? Default is device (might consume more device memory.
4753: One can choose host to save device memory). Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4755: Level: intermediate
4757: Notes:
4758: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4759: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4761: Users usually access the factorization solvers via `KSP`
4763: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4764: such as pastix, superlu, mumps etc. PETSc must have been ./configure to use the external solver, using the option --download-package or --with-package-dir
4766: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4767: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4768: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4770: Some of the packages have options for controlling the factorization, these are in the form -prefix_mat_packagename_packageoption
4771: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can set
4772: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4774: Developer Note:
4775: This should actually be called `MatCreateFactor()` since it creates a new factor object
4777: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4778: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4779: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4780: @*/
4781: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4782: {
4783: PetscBool foundtype, foundmtype;
4784: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4786: PetscFunctionBegin;
4790: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4791: MatCheckPreallocated(mat, 1);
4793: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4794: if (!foundtype) {
4795: if (type) {
4796: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4797: ((PetscObject)mat)->type_name, type);
4798: } else {
4799: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4800: }
4801: }
4802: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4803: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4805: PetscCall((*conv)(mat, ftype, f));
4806: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4807: PetscFunctionReturn(PETSC_SUCCESS);
4808: }
4810: /*@C
4811: MatGetFactorAvailable - Returns a flag if matrix supports particular type and factor type
4813: Not Collective
4815: Input Parameters:
4816: + mat - the matrix
4817: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4818: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4820: Output Parameter:
4821: . flg - PETSC_TRUE if the factorization is available
4823: Level: intermediate
4825: Notes:
4826: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4827: such as pastix, superlu, mumps etc.
4829: PETSc must have been ./configure to use the external solver, using the option --download-package
4831: Developer Note:
4832: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4834: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4835: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4836: @*/
4837: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4838: {
4839: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4841: PetscFunctionBegin;
4843: PetscAssertPointer(flg, 4);
4845: *flg = PETSC_FALSE;
4846: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4848: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4849: MatCheckPreallocated(mat, 1);
4851: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4852: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4853: PetscFunctionReturn(PETSC_SUCCESS);
4854: }
4856: /*@
4857: MatDuplicate - Duplicates a matrix including the non-zero structure.
4859: Collective
4861: Input Parameters:
4862: + mat - the matrix
4863: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4864: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4866: Output Parameter:
4867: . M - pointer to place new matrix
4869: Level: intermediate
4871: Notes:
4872: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4874: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4876: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4878: When original mat is a product of matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4879: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
4880: User should not use `MatDuplicate()` to create new matrix `M` if `M` is intended to be reused as the product of matrix operation.
4882: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4883: @*/
4884: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4885: {
4886: Mat B;
4887: VecType vtype;
4888: PetscInt i;
4889: PetscObject dm, container_h, container_d;
4890: void (*viewf)(void);
4892: PetscFunctionBegin;
4895: PetscAssertPointer(M, 3);
4896: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4897: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4898: MatCheckPreallocated(mat, 1);
4900: *M = NULL;
4901: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4902: PetscUseTypeMethod(mat, duplicate, op, M);
4903: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4904: B = *M;
4906: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4907: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4908: PetscCall(MatGetVecType(mat, &vtype));
4909: PetscCall(MatSetVecType(B, vtype));
4911: B->stencil.dim = mat->stencil.dim;
4912: B->stencil.noc = mat->stencil.noc;
4913: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4914: B->stencil.dims[i] = mat->stencil.dims[i];
4915: B->stencil.starts[i] = mat->stencil.starts[i];
4916: }
4918: B->nooffproczerorows = mat->nooffproczerorows;
4919: B->nooffprocentries = mat->nooffprocentries;
4921: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4922: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4923: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4924: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4925: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4926: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4927: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4928: PetscFunctionReturn(PETSC_SUCCESS);
4929: }
4931: /*@
4932: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4934: Logically Collective
4936: Input Parameter:
4937: . mat - the matrix
4939: Output Parameter:
4940: . v - the diagonal of the matrix
4942: Level: intermediate
4944: Note:
4945: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
4946: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
4947: is larger than `ndiag`, the values of the remaining entries are unspecified.
4949: Currently only correct in parallel for square matrices.
4951: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4952: @*/
4953: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4954: {
4955: PetscFunctionBegin;
4959: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4960: MatCheckPreallocated(mat, 1);
4961: if (PetscDefined(USE_DEBUG)) {
4962: PetscInt nv, row, col, ndiag;
4964: PetscCall(VecGetLocalSize(v, &nv));
4965: PetscCall(MatGetLocalSize(mat, &row, &col));
4966: ndiag = PetscMin(row, col);
4967: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
4968: }
4970: PetscUseTypeMethod(mat, getdiagonal, v);
4971: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4972: PetscFunctionReturn(PETSC_SUCCESS);
4973: }
4975: /*@C
4976: MatGetRowMin - Gets the minimum value (of the real part) of each
4977: row of the matrix
4979: Logically Collective
4981: Input Parameter:
4982: . mat - the matrix
4984: Output Parameters:
4985: + v - the vector for storing the maximums
4986: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
4988: Level: intermediate
4990: Note:
4991: The result of this call are the same as if one converted the matrix to dense format
4992: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4994: This code is only implemented for a couple of matrix formats.
4996: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
4997: `MatGetRowMax()`
4998: @*/
4999: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5000: {
5001: PetscFunctionBegin;
5005: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5007: if (!mat->cmap->N) {
5008: PetscCall(VecSet(v, PETSC_MAX_REAL));
5009: if (idx) {
5010: PetscInt i, m = mat->rmap->n;
5011: for (i = 0; i < m; i++) idx[i] = -1;
5012: }
5013: } else {
5014: MatCheckPreallocated(mat, 1);
5015: }
5016: PetscUseTypeMethod(mat, getrowmin, v, idx);
5017: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5018: PetscFunctionReturn(PETSC_SUCCESS);
5019: }
5021: /*@C
5022: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5023: row of the matrix
5025: Logically Collective
5027: Input Parameter:
5028: . mat - the matrix
5030: Output Parameters:
5031: + v - the vector for storing the minimums
5032: - idx - the indices of the column found for each row (or `NULL` if not needed)
5034: Level: intermediate
5036: Notes:
5037: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5038: row is 0 (the first column).
5040: This code is only implemented for a couple of matrix formats.
5042: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5043: @*/
5044: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5045: {
5046: PetscFunctionBegin;
5050: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5051: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5053: if (!mat->cmap->N) {
5054: PetscCall(VecSet(v, 0.0));
5055: if (idx) {
5056: PetscInt i, m = mat->rmap->n;
5057: for (i = 0; i < m; i++) idx[i] = -1;
5058: }
5059: } else {
5060: MatCheckPreallocated(mat, 1);
5061: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5062: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5063: }
5064: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5065: PetscFunctionReturn(PETSC_SUCCESS);
5066: }
5068: /*@C
5069: MatGetRowMax - Gets the maximum value (of the real part) of each
5070: row of the matrix
5072: Logically Collective
5074: Input Parameter:
5075: . mat - the matrix
5077: Output Parameters:
5078: + v - the vector for storing the maximums
5079: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5081: Level: intermediate
5083: Notes:
5084: The result of this call are the same as if one converted the matrix to dense format
5085: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5087: This code is only implemented for a couple of matrix formats.
5089: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5090: @*/
5091: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5092: {
5093: PetscFunctionBegin;
5097: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5099: if (!mat->cmap->N) {
5100: PetscCall(VecSet(v, PETSC_MIN_REAL));
5101: if (idx) {
5102: PetscInt i, m = mat->rmap->n;
5103: for (i = 0; i < m; i++) idx[i] = -1;
5104: }
5105: } else {
5106: MatCheckPreallocated(mat, 1);
5107: PetscUseTypeMethod(mat, getrowmax, v, idx);
5108: }
5109: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5110: PetscFunctionReturn(PETSC_SUCCESS);
5111: }
5113: /*@C
5114: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5115: row of the matrix
5117: Logically Collective
5119: Input Parameter:
5120: . mat - the matrix
5122: Output Parameters:
5123: + v - the vector for storing the maximums
5124: - idx - the indices of the column found for each row (or `NULL` if not needed)
5126: Level: intermediate
5128: Notes:
5129: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5130: row is 0 (the first column).
5132: This code is only implemented for a couple of matrix formats.
5134: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5135: @*/
5136: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5137: {
5138: PetscFunctionBegin;
5142: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5144: if (!mat->cmap->N) {
5145: PetscCall(VecSet(v, 0.0));
5146: if (idx) {
5147: PetscInt i, m = mat->rmap->n;
5148: for (i = 0; i < m; i++) idx[i] = -1;
5149: }
5150: } else {
5151: MatCheckPreallocated(mat, 1);
5152: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5153: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5154: }
5155: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5156: PetscFunctionReturn(PETSC_SUCCESS);
5157: }
5159: /*@
5160: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5162: Logically Collective
5164: Input Parameter:
5165: . mat - the matrix
5167: Output Parameter:
5168: . v - the vector for storing the sum
5170: Level: intermediate
5172: This code is only implemented for a couple of matrix formats.
5174: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5175: @*/
5176: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5177: {
5178: PetscFunctionBegin;
5182: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5184: if (!mat->cmap->N) {
5185: PetscCall(VecSet(v, 0.0));
5186: } else {
5187: MatCheckPreallocated(mat, 1);
5188: PetscUseTypeMethod(mat, getrowsumabs, v);
5189: }
5190: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5191: PetscFunctionReturn(PETSC_SUCCESS);
5192: }
5194: /*@
5195: MatGetRowSum - Gets the sum of each row of the matrix
5197: Logically or Neighborhood Collective
5199: Input Parameter:
5200: . mat - the matrix
5202: Output Parameter:
5203: . v - the vector for storing the sum of rows
5205: Level: intermediate
5207: Note:
5208: This code is slow since it is not currently specialized for different formats
5210: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5211: @*/
5212: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5213: {
5214: Vec ones;
5216: PetscFunctionBegin;
5220: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5221: MatCheckPreallocated(mat, 1);
5222: PetscCall(MatCreateVecs(mat, &ones, NULL));
5223: PetscCall(VecSet(ones, 1.));
5224: PetscCall(MatMult(mat, ones, v));
5225: PetscCall(VecDestroy(&ones));
5226: PetscFunctionReturn(PETSC_SUCCESS);
5227: }
5229: /*@
5230: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5231: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5233: Collective
5235: Input Parameter:
5236: . mat - the matrix to provide the transpose
5238: Output Parameter:
5239: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5241: Level: advanced
5243: Note:
5244: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5245: routine allows bypassing that call.
5247: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5248: @*/
5249: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5250: {
5251: PetscContainer rB = NULL;
5252: MatParentState *rb = NULL;
5254: PetscFunctionBegin;
5255: PetscCall(PetscNew(&rb));
5256: rb->id = ((PetscObject)mat)->id;
5257: rb->state = 0;
5258: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5259: PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5260: PetscCall(PetscContainerSetPointer(rB, rb));
5261: PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5262: PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5263: PetscCall(PetscObjectDereference((PetscObject)rB));
5264: PetscFunctionReturn(PETSC_SUCCESS);
5265: }
5267: /*@
5268: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5270: Collective
5272: Input Parameters:
5273: + mat - the matrix to transpose
5274: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5276: Output Parameter:
5277: . B - the transpose
5279: Level: intermediate
5281: Notes:
5282: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5284: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5285: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5287: If the nonzero structure of mat changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5289: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5291: If mat is unchanged from the last call this function returns immediately without recomputing the result
5293: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
5295: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5296: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5297: @*/
5298: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5299: {
5300: PetscContainer rB = NULL;
5301: MatParentState *rb = NULL;
5303: PetscFunctionBegin;
5306: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5307: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5308: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5309: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5310: MatCheckPreallocated(mat, 1);
5311: if (reuse == MAT_REUSE_MATRIX) {
5312: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5313: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5314: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5315: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5316: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5317: }
5319: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5320: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5321: PetscUseTypeMethod(mat, transpose, reuse, B);
5322: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5323: }
5324: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5326: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5327: if (reuse != MAT_INPLACE_MATRIX) {
5328: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5329: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5330: rb->state = ((PetscObject)mat)->state;
5331: rb->nonzerostate = mat->nonzerostate;
5332: }
5333: PetscFunctionReturn(PETSC_SUCCESS);
5334: }
5336: /*@
5337: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5339: Collective
5341: Input Parameter:
5342: . A - the matrix to transpose
5344: Output Parameter:
5345: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5346: numerical portion.
5348: Level: intermediate
5350: Note:
5351: This is not supported for many matrix types, use `MatTranspose()` in those cases
5353: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5354: @*/
5355: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5356: {
5357: PetscFunctionBegin;
5360: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5361: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5362: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5363: PetscUseTypeMethod(A, transposesymbolic, B);
5364: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5366: PetscCall(MatTransposeSetPrecursor(A, *B));
5367: PetscFunctionReturn(PETSC_SUCCESS);
5368: }
5370: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5371: {
5372: PetscContainer rB;
5373: MatParentState *rb;
5375: PetscFunctionBegin;
5378: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5379: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5380: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5381: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5382: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5383: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5384: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5385: PetscFunctionReturn(PETSC_SUCCESS);
5386: }
5388: /*@
5389: MatIsTranspose - Test whether a matrix is another one's transpose,
5390: or its own, in which case it tests symmetry.
5392: Collective
5394: Input Parameters:
5395: + A - the matrix to test
5396: . B - the matrix to test against, this can equal the first parameter
5397: - tol - tolerance, differences between entries smaller than this are counted as zero
5399: Output Parameter:
5400: . flg - the result
5402: Level: intermediate
5404: Notes:
5405: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5406: test involves parallel copies of the block off-diagonal parts of the matrix.
5408: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5409: @*/
5410: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5411: {
5412: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5414: PetscFunctionBegin;
5417: PetscAssertPointer(flg, 4);
5418: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5419: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5420: *flg = PETSC_FALSE;
5421: if (f && g) {
5422: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5423: PetscCall((*f)(A, B, tol, flg));
5424: } else {
5425: MatType mattype;
5427: PetscCall(MatGetType(f ? B : A, &mattype));
5428: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5429: }
5430: PetscFunctionReturn(PETSC_SUCCESS);
5431: }
5433: /*@
5434: MatHermitianTranspose - Computes an in-place or out-of-place Hermitian transpose of a matrix in complex conjugate.
5436: Collective
5438: Input Parameters:
5439: + mat - the matrix to transpose and complex conjugate
5440: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5442: Output Parameter:
5443: . B - the Hermitian transpose
5445: Level: intermediate
5447: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5448: @*/
5449: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5450: {
5451: PetscFunctionBegin;
5452: PetscCall(MatTranspose(mat, reuse, B));
5453: #if defined(PETSC_USE_COMPLEX)
5454: PetscCall(MatConjugate(*B));
5455: #endif
5456: PetscFunctionReturn(PETSC_SUCCESS);
5457: }
5459: /*@
5460: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose,
5462: Collective
5464: Input Parameters:
5465: + A - the matrix to test
5466: . B - the matrix to test against, this can equal the first parameter
5467: - tol - tolerance, differences between entries smaller than this are counted as zero
5469: Output Parameter:
5470: . flg - the result
5472: Level: intermediate
5474: Notes:
5475: Only available for `MATAIJ` matrices.
5477: The sequential algorithm
5478: has a running time of the order of the number of nonzeros; the parallel
5479: test involves parallel copies of the block off-diagonal parts of the matrix.
5481: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5482: @*/
5483: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5484: {
5485: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5487: PetscFunctionBegin;
5490: PetscAssertPointer(flg, 4);
5491: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5492: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5493: if (f && g) {
5494: PetscCheck(f != g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5495: PetscCall((*f)(A, B, tol, flg));
5496: }
5497: PetscFunctionReturn(PETSC_SUCCESS);
5498: }
5500: /*@
5501: MatPermute - Creates a new matrix with rows and columns permuted from the
5502: original.
5504: Collective
5506: Input Parameters:
5507: + mat - the matrix to permute
5508: . row - row permutation, each processor supplies only the permutation for its rows
5509: - col - column permutation, each processor supplies only the permutation for its columns
5511: Output Parameter:
5512: . B - the permuted matrix
5514: Level: advanced
5516: Note:
5517: The index sets map from row/col of permuted matrix to row/col of original matrix.
5518: The index sets should be on the same communicator as mat and have the same local sizes.
5520: Developer Note:
5521: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5522: exploit the fact that row and col are permutations, consider implementing the
5523: more general `MatCreateSubMatrix()` instead.
5525: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5526: @*/
5527: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5528: {
5529: PetscFunctionBegin;
5534: PetscAssertPointer(B, 4);
5535: PetscCheckSameComm(mat, 1, row, 2);
5536: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5537: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5538: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5539: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5540: MatCheckPreallocated(mat, 1);
5542: if (mat->ops->permute) {
5543: PetscUseTypeMethod(mat, permute, row, col, B);
5544: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5545: } else {
5546: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5547: }
5548: PetscFunctionReturn(PETSC_SUCCESS);
5549: }
5551: /*@
5552: MatEqual - Compares two matrices.
5554: Collective
5556: Input Parameters:
5557: + A - the first matrix
5558: - B - the second matrix
5560: Output Parameter:
5561: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5563: Level: intermediate
5565: .seealso: [](ch_matrices), `Mat`
5566: @*/
5567: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5568: {
5569: PetscFunctionBegin;
5574: PetscAssertPointer(flg, 3);
5575: PetscCheckSameComm(A, 1, B, 2);
5576: MatCheckPreallocated(A, 1);
5577: MatCheckPreallocated(B, 2);
5578: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5579: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5580: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5581: B->cmap->N);
5582: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5583: PetscUseTypeMethod(A, equal, B, flg);
5584: } else {
5585: PetscCall(MatMultEqual(A, B, 10, flg));
5586: }
5587: PetscFunctionReturn(PETSC_SUCCESS);
5588: }
5590: /*@
5591: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5592: matrices that are stored as vectors. Either of the two scaling
5593: matrices can be `NULL`.
5595: Collective
5597: Input Parameters:
5598: + mat - the matrix to be scaled
5599: . l - the left scaling vector (or `NULL`)
5600: - r - the right scaling vector (or `NULL`)
5602: Level: intermediate
5604: Note:
5605: `MatDiagonalScale()` computes $A = LAR$, where
5606: L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
5607: The L scales the rows of the matrix, the R scales the columns of the matrix.
5609: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5610: @*/
5611: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5612: {
5613: PetscFunctionBegin;
5616: if (l) {
5618: PetscCheckSameComm(mat, 1, l, 2);
5619: }
5620: if (r) {
5622: PetscCheckSameComm(mat, 1, r, 3);
5623: }
5624: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5625: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5626: MatCheckPreallocated(mat, 1);
5627: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5629: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5630: PetscUseTypeMethod(mat, diagonalscale, l, r);
5631: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5632: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5633: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5634: PetscFunctionReturn(PETSC_SUCCESS);
5635: }
5637: /*@
5638: MatScale - Scales all elements of a matrix by a given number.
5640: Logically Collective
5642: Input Parameters:
5643: + mat - the matrix to be scaled
5644: - a - the scaling value
5646: Level: intermediate
5648: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5649: @*/
5650: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5651: {
5652: PetscFunctionBegin;
5655: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5656: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5658: MatCheckPreallocated(mat, 1);
5660: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5661: if (a != (PetscScalar)1.0) {
5662: PetscUseTypeMethod(mat, scale, a);
5663: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5664: }
5665: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5666: PetscFunctionReturn(PETSC_SUCCESS);
5667: }
5669: /*@
5670: MatNorm - Calculates various norms of a matrix.
5672: Collective
5674: Input Parameters:
5675: + mat - the matrix
5676: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5678: Output Parameter:
5679: . nrm - the resulting norm
5681: Level: intermediate
5683: .seealso: [](ch_matrices), `Mat`
5684: @*/
5685: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5686: {
5687: PetscFunctionBegin;
5690: PetscAssertPointer(nrm, 3);
5692: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5693: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5694: MatCheckPreallocated(mat, 1);
5696: PetscUseTypeMethod(mat, norm, type, nrm);
5697: PetscFunctionReturn(PETSC_SUCCESS);
5698: }
5700: /*
5701: This variable is used to prevent counting of MatAssemblyBegin() that
5702: are called from within a MatAssemblyEnd().
5703: */
5704: static PetscInt MatAssemblyEnd_InUse = 0;
5705: /*@
5706: MatAssemblyBegin - Begins assembling the matrix. This routine should
5707: be called after completing all calls to `MatSetValues()`.
5709: Collective
5711: Input Parameters:
5712: + mat - the matrix
5713: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5715: Level: beginner
5717: Notes:
5718: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5719: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5721: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5722: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5723: using the matrix.
5725: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5726: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5727: a global collective operation requiring all processes that share the matrix.
5729: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5730: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5731: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5733: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5734: @*/
5735: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5736: {
5737: PetscFunctionBegin;
5740: MatCheckPreallocated(mat, 1);
5741: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5742: if (mat->assembled) {
5743: mat->was_assembled = PETSC_TRUE;
5744: mat->assembled = PETSC_FALSE;
5745: }
5747: if (!MatAssemblyEnd_InUse) {
5748: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5749: PetscTryTypeMethod(mat, assemblybegin, type);
5750: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5751: } else PetscTryTypeMethod(mat, assemblybegin, type);
5752: PetscFunctionReturn(PETSC_SUCCESS);
5753: }
5755: /*@
5756: MatAssembled - Indicates if a matrix has been assembled and is ready for
5757: use; for example, in matrix-vector product.
5759: Not Collective
5761: Input Parameter:
5762: . mat - the matrix
5764: Output Parameter:
5765: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5767: Level: advanced
5769: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5770: @*/
5771: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5772: {
5773: PetscFunctionBegin;
5775: PetscAssertPointer(assembled, 2);
5776: *assembled = mat->assembled;
5777: PetscFunctionReturn(PETSC_SUCCESS);
5778: }
5780: /*@
5781: MatAssemblyEnd - Completes assembling the matrix. This routine should
5782: be called after `MatAssemblyBegin()`.
5784: Collective
5786: Input Parameters:
5787: + mat - the matrix
5788: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5790: Options Database Keys:
5791: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5792: . -mat_view ::ascii_info_detail - Prints more detailed info
5793: . -mat_view - Prints matrix in ASCII format
5794: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5795: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5796: . -display <name> - Sets display name (default is host)
5797: . -draw_pause <sec> - Sets number of seconds to pause after display
5798: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5799: . -viewer_socket_machine <machine> - Machine to use for socket
5800: . -viewer_socket_port <port> - Port number to use for socket
5801: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5803: Level: beginner
5805: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5806: @*/
5807: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5808: {
5809: static PetscInt inassm = 0;
5810: PetscBool flg = PETSC_FALSE;
5812: PetscFunctionBegin;
5816: inassm++;
5817: MatAssemblyEnd_InUse++;
5818: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5819: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5820: PetscTryTypeMethod(mat, assemblyend, type);
5821: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5822: } else PetscTryTypeMethod(mat, assemblyend, type);
5824: /* Flush assembly is not a true assembly */
5825: if (type != MAT_FLUSH_ASSEMBLY) {
5826: if (mat->num_ass) {
5827: if (!mat->symmetry_eternal) {
5828: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5829: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5830: }
5831: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5832: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5833: }
5834: mat->num_ass++;
5835: mat->assembled = PETSC_TRUE;
5836: mat->ass_nonzerostate = mat->nonzerostate;
5837: }
5839: mat->insertmode = NOT_SET_VALUES;
5840: MatAssemblyEnd_InUse--;
5841: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5842: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5843: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5845: if (mat->checksymmetryonassembly) {
5846: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5847: if (flg) {
5848: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5849: } else {
5850: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5851: }
5852: }
5853: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5854: }
5855: inassm--;
5856: PetscFunctionReturn(PETSC_SUCCESS);
5857: }
5859: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5860: /*@
5861: MatSetOption - Sets a parameter option for a matrix. Some options
5862: may be specific to certain storage formats. Some options
5863: determine how values will be inserted (or added). Sorted,
5864: row-oriented input will generally assemble the fastest. The default
5865: is row-oriented.
5867: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5869: Input Parameters:
5870: + mat - the matrix
5871: . op - the option, one of those listed below (and possibly others),
5872: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5874: Options Describing Matrix Structure:
5875: + `MAT_SPD` - symmetric positive definite
5876: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5877: . `MAT_HERMITIAN` - transpose is the complex conjugation
5878: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5879: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5880: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5881: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5883: These are not really options of the matrix, they are knowledge about the structure of the matrix that users may provide so that they
5884: do not need to be computed (usually at a high cost)
5886: Options For Use with `MatSetValues()`:
5887: Insert a logically dense subblock, which can be
5888: . `MAT_ROW_ORIENTED` - row-oriented (default)
5890: These options reflect the data you pass in with `MatSetValues()`; it has
5891: nothing to do with how the data is stored internally in the matrix
5892: data structure.
5894: When (re)assembling a matrix, we can restrict the input for
5895: efficiency/debugging purposes. These options include
5896: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5897: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5898: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5899: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5900: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5901: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5902: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5903: performance for very large process counts.
5904: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5905: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5906: functions, instead sending only neighbor messages.
5908: Level: intermediate
5910: Notes:
5911: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5913: Some options are relevant only for particular matrix types and
5914: are thus ignored by others. Other options are not supported by
5915: certain matrix types and will generate an error message if set.
5917: If using Fortran to compute a matrix, one may need to
5918: use the column-oriented option (or convert to the row-oriented
5919: format).
5921: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5922: that would generate a new entry in the nonzero structure is instead
5923: ignored. Thus, if memory has not already been allocated for this particular
5924: data, then the insertion is ignored. For dense matrices, in which
5925: the entire array is allocated, no entries are ever ignored.
5926: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5928: `MAT_NEW_NONZERO_LOCATION_ERR` set to PETSC_TRUE indicates that any add or insertion
5929: that would generate a new entry in the nonzero structure instead produces
5930: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5932: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5933: that would generate a new entry that has not been preallocated will
5934: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5935: only.) This is a useful flag when debugging matrix memory preallocation.
5936: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5938: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5939: other processors should be dropped, rather than stashed.
5940: This is useful if you know that the "owning" processor is also
5941: always generating the correct matrix entries, so that PETSc need
5942: not transfer duplicate entries generated on another processor.
5944: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
5945: searches during matrix assembly. When this flag is set, the hash table
5946: is created during the first matrix assembly. This hash table is
5947: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
5948: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
5949: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
5950: supported by `MATMPIBAIJ` format only.
5952: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
5953: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
5955: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
5956: a zero location in the matrix
5958: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
5960: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
5961: zero row routines and thus improves performance for very large process counts.
5963: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
5964: part of the matrix (since they should match the upper triangular part).
5966: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
5967: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
5968: with finite difference schemes with non-periodic boundary conditions.
5970: Developer Note:
5971: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
5972: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
5973: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
5974: not changed.
5976: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
5977: @*/
5978: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
5979: {
5980: PetscFunctionBegin;
5982: if (op > 0) {
5985: }
5987: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
5989: switch (op) {
5990: case MAT_FORCE_DIAGONAL_ENTRIES:
5991: mat->force_diagonals = flg;
5992: PetscFunctionReturn(PETSC_SUCCESS);
5993: case MAT_NO_OFF_PROC_ENTRIES:
5994: mat->nooffprocentries = flg;
5995: PetscFunctionReturn(PETSC_SUCCESS);
5996: case MAT_SUBSET_OFF_PROC_ENTRIES:
5997: mat->assembly_subset = flg;
5998: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5999: #if !defined(PETSC_HAVE_MPIUNI)
6000: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6001: #endif
6002: mat->stash.first_assembly_done = PETSC_FALSE;
6003: }
6004: PetscFunctionReturn(PETSC_SUCCESS);
6005: case MAT_NO_OFF_PROC_ZERO_ROWS:
6006: mat->nooffproczerorows = flg;
6007: PetscFunctionReturn(PETSC_SUCCESS);
6008: case MAT_SPD:
6009: if (flg) {
6010: mat->spd = PETSC_BOOL3_TRUE;
6011: mat->symmetric = PETSC_BOOL3_TRUE;
6012: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6013: } else {
6014: mat->spd = PETSC_BOOL3_FALSE;
6015: }
6016: break;
6017: case MAT_SYMMETRIC:
6018: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6019: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6020: #if !defined(PETSC_USE_COMPLEX)
6021: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6022: #endif
6023: break;
6024: case MAT_HERMITIAN:
6025: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6026: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6027: #if !defined(PETSC_USE_COMPLEX)
6028: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6029: #endif
6030: break;
6031: case MAT_STRUCTURALLY_SYMMETRIC:
6032: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6033: break;
6034: case MAT_SYMMETRY_ETERNAL:
6035: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6036: mat->symmetry_eternal = flg;
6037: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6038: break;
6039: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6040: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6041: mat->structural_symmetry_eternal = flg;
6042: break;
6043: case MAT_SPD_ETERNAL:
6044: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6045: mat->spd_eternal = flg;
6046: if (flg) {
6047: mat->structural_symmetry_eternal = PETSC_TRUE;
6048: mat->symmetry_eternal = PETSC_TRUE;
6049: }
6050: break;
6051: case MAT_STRUCTURE_ONLY:
6052: mat->structure_only = flg;
6053: break;
6054: case MAT_SORTED_FULL:
6055: mat->sortedfull = flg;
6056: break;
6057: default:
6058: break;
6059: }
6060: PetscTryTypeMethod(mat, setoption, op, flg);
6061: PetscFunctionReturn(PETSC_SUCCESS);
6062: }
6064: /*@
6065: MatGetOption - Gets a parameter option that has been set for a matrix.
6067: Logically Collective
6069: Input Parameters:
6070: + mat - the matrix
6071: - op - the option, this only responds to certain options, check the code for which ones
6073: Output Parameter:
6074: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6076: Level: intermediate
6078: Notes:
6079: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6081: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6082: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6084: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6085: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6086: @*/
6087: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6088: {
6089: PetscFunctionBegin;
6093: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6094: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6096: switch (op) {
6097: case MAT_NO_OFF_PROC_ENTRIES:
6098: *flg = mat->nooffprocentries;
6099: break;
6100: case MAT_NO_OFF_PROC_ZERO_ROWS:
6101: *flg = mat->nooffproczerorows;
6102: break;
6103: case MAT_SYMMETRIC:
6104: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6105: break;
6106: case MAT_HERMITIAN:
6107: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6108: break;
6109: case MAT_STRUCTURALLY_SYMMETRIC:
6110: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6111: break;
6112: case MAT_SPD:
6113: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6114: break;
6115: case MAT_SYMMETRY_ETERNAL:
6116: *flg = mat->symmetry_eternal;
6117: break;
6118: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6119: *flg = mat->symmetry_eternal;
6120: break;
6121: default:
6122: break;
6123: }
6124: PetscFunctionReturn(PETSC_SUCCESS);
6125: }
6127: /*@
6128: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6129: this routine retains the old nonzero structure.
6131: Logically Collective
6133: Input Parameter:
6134: . mat - the matrix
6136: Level: intermediate
6138: Note:
6139: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6140: See the Performance chapter of the users manual for information on preallocating matrices.
6142: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6143: @*/
6144: PetscErrorCode MatZeroEntries(Mat mat)
6145: {
6146: PetscFunctionBegin;
6149: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6150: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6151: MatCheckPreallocated(mat, 1);
6153: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6154: PetscUseTypeMethod(mat, zeroentries);
6155: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6156: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6157: PetscFunctionReturn(PETSC_SUCCESS);
6158: }
6160: /*@
6161: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6162: of a set of rows and columns of a matrix.
6164: Collective
6166: Input Parameters:
6167: + mat - the matrix
6168: . numRows - the number of rows/columns to zero
6169: . rows - the global row indices
6170: . diag - value put in the diagonal of the eliminated rows
6171: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6172: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6174: Level: intermediate
6176: Notes:
6177: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6179: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6180: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6182: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6183: Krylov method to take advantage of the known solution on the zeroed rows.
6185: For the parallel case, all processes that share the matrix (i.e.,
6186: those in the communicator used for matrix creation) MUST call this
6187: routine, regardless of whether any rows being zeroed are owned by
6188: them.
6190: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6191: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6192: missing.
6194: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6195: list only rows local to itself).
6197: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6199: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6200: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6201: @*/
6202: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6203: {
6204: PetscFunctionBegin;
6207: if (numRows) PetscAssertPointer(rows, 3);
6208: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6209: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6210: MatCheckPreallocated(mat, 1);
6212: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6213: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6214: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6215: PetscFunctionReturn(PETSC_SUCCESS);
6216: }
6218: /*@
6219: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6220: of a set of rows and columns of a matrix.
6222: Collective
6224: Input Parameters:
6225: + mat - the matrix
6226: . is - the rows to zero
6227: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6228: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6229: - b - optional vector of right-hand side, that will be adjusted by provided solution
6231: Level: intermediate
6233: Note:
6234: See `MatZeroRowsColumns()` for details on how this routine operates.
6236: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6237: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6238: @*/
6239: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6240: {
6241: PetscInt numRows;
6242: const PetscInt *rows;
6244: PetscFunctionBegin;
6249: PetscCall(ISGetLocalSize(is, &numRows));
6250: PetscCall(ISGetIndices(is, &rows));
6251: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6252: PetscCall(ISRestoreIndices(is, &rows));
6253: PetscFunctionReturn(PETSC_SUCCESS);
6254: }
6256: /*@
6257: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6258: of a set of rows of a matrix.
6260: Collective
6262: Input Parameters:
6263: + mat - the matrix
6264: . numRows - the number of rows to zero
6265: . rows - the global row indices
6266: . diag - value put in the diagonal of the zeroed rows
6267: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6268: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6270: Level: intermediate
6272: Notes:
6273: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6275: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6277: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6278: Krylov method to take advantage of the known solution on the zeroed rows.
6280: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6281: from the matrix.
6283: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6284: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6285: formats this does not alter the nonzero structure.
6287: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) the nonzero structure
6288: of the matrix is not changed the values are
6289: merely zeroed.
6291: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6292: formats can optionally remove the main diagonal entry from the
6293: nonzero structure as well, by passing 0.0 as the final argument).
6295: For the parallel case, all processes that share the matrix (i.e.,
6296: those in the communicator used for matrix creation) MUST call this
6297: routine, regardless of whether any rows being zeroed are owned by
6298: them.
6300: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6301: list only rows local to itself).
6303: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6304: owns that are to be zeroed. This saves a global synchronization in the implementation.
6306: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6307: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6308: @*/
6309: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6310: {
6311: PetscFunctionBegin;
6314: if (numRows) PetscAssertPointer(rows, 3);
6315: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6316: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6317: MatCheckPreallocated(mat, 1);
6319: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6320: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6321: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6322: PetscFunctionReturn(PETSC_SUCCESS);
6323: }
6325: /*@
6326: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6327: of a set of rows of a matrix.
6329: Collective
6331: Input Parameters:
6332: + mat - the matrix
6333: . is - index set of rows to remove (if `NULL` then no row is removed)
6334: . diag - value put in all diagonals of eliminated rows
6335: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6336: - b - optional vector of right-hand side, that will be adjusted by provided solution
6338: Level: intermediate
6340: Note:
6341: See `MatZeroRows()` for details on how this routine operates.
6343: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6344: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6345: @*/
6346: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6347: {
6348: PetscInt numRows = 0;
6349: const PetscInt *rows = NULL;
6351: PetscFunctionBegin;
6354: if (is) {
6356: PetscCall(ISGetLocalSize(is, &numRows));
6357: PetscCall(ISGetIndices(is, &rows));
6358: }
6359: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6360: if (is) PetscCall(ISRestoreIndices(is, &rows));
6361: PetscFunctionReturn(PETSC_SUCCESS);
6362: }
6364: /*@
6365: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6366: of a set of rows of a matrix. These rows must be local to the process.
6368: Collective
6370: Input Parameters:
6371: + mat - the matrix
6372: . numRows - the number of rows to remove
6373: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6374: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6375: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6376: - b - optional vector of right-hand side, that will be adjusted by provided solution
6378: Level: intermediate
6380: Notes:
6381: See `MatZeroRows()` for details on how this routine operates.
6383: The grid coordinates are across the entire grid, not just the local portion
6385: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6386: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6387: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6388: `DM_BOUNDARY_PERIODIC` boundary type.
6390: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6391: a single value per point) you can skip filling those indices.
6393: Fortran Note:
6394: `idxm` and `idxn` should be declared as
6395: $ MatStencil idxm(4, m)
6396: and the values inserted using
6397: .vb
6398: idxm(MatStencil_i, 1) = i
6399: idxm(MatStencil_j, 1) = j
6400: idxm(MatStencil_k, 1) = k
6401: idxm(MatStencil_c, 1) = c
6402: etc
6403: .ve
6405: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6406: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6407: @*/
6408: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6409: {
6410: PetscInt dim = mat->stencil.dim;
6411: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6412: PetscInt *dims = mat->stencil.dims + 1;
6413: PetscInt *starts = mat->stencil.starts;
6414: PetscInt *dxm = (PetscInt *)rows;
6415: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6417: PetscFunctionBegin;
6420: if (numRows) PetscAssertPointer(rows, 3);
6422: PetscCall(PetscMalloc1(numRows, &jdxm));
6423: for (i = 0; i < numRows; ++i) {
6424: /* Skip unused dimensions (they are ordered k, j, i, c) */
6425: for (j = 0; j < 3 - sdim; ++j) dxm++;
6426: /* Local index in X dir */
6427: tmp = *dxm++ - starts[0];
6428: /* Loop over remaining dimensions */
6429: for (j = 0; j < dim - 1; ++j) {
6430: /* If nonlocal, set index to be negative */
6431: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6432: /* Update local index */
6433: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6434: }
6435: /* Skip component slot if necessary */
6436: if (mat->stencil.noc) dxm++;
6437: /* Local row number */
6438: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6439: }
6440: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6441: PetscCall(PetscFree(jdxm));
6442: PetscFunctionReturn(PETSC_SUCCESS);
6443: }
6445: /*@
6446: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6447: of a set of rows and columns of a matrix.
6449: Collective
6451: Input Parameters:
6452: + mat - the matrix
6453: . numRows - the number of rows/columns to remove
6454: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6455: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6456: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6457: - b - optional vector of right-hand side, that will be adjusted by provided solution
6459: Level: intermediate
6461: Notes:
6462: See `MatZeroRowsColumns()` for details on how this routine operates.
6464: The grid coordinates are across the entire grid, not just the local portion
6466: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6467: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6468: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6469: `DM_BOUNDARY_PERIODIC` boundary type.
6471: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6472: a single value per point) you can skip filling those indices.
6474: Fortran Note:
6475: `idxm` and `idxn` should be declared as
6476: $ MatStencil idxm(4, m)
6477: and the values inserted using
6478: .vb
6479: idxm(MatStencil_i, 1) = i
6480: idxm(MatStencil_j, 1) = j
6481: idxm(MatStencil_k, 1) = k
6482: idxm(MatStencil_c, 1) = c
6483: etc
6484: .ve
6486: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6487: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6488: @*/
6489: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6490: {
6491: PetscInt dim = mat->stencil.dim;
6492: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6493: PetscInt *dims = mat->stencil.dims + 1;
6494: PetscInt *starts = mat->stencil.starts;
6495: PetscInt *dxm = (PetscInt *)rows;
6496: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6498: PetscFunctionBegin;
6501: if (numRows) PetscAssertPointer(rows, 3);
6503: PetscCall(PetscMalloc1(numRows, &jdxm));
6504: for (i = 0; i < numRows; ++i) {
6505: /* Skip unused dimensions (they are ordered k, j, i, c) */
6506: for (j = 0; j < 3 - sdim; ++j) dxm++;
6507: /* Local index in X dir */
6508: tmp = *dxm++ - starts[0];
6509: /* Loop over remaining dimensions */
6510: for (j = 0; j < dim - 1; ++j) {
6511: /* If nonlocal, set index to be negative */
6512: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6513: /* Update local index */
6514: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6515: }
6516: /* Skip component slot if necessary */
6517: if (mat->stencil.noc) dxm++;
6518: /* Local row number */
6519: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6520: }
6521: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6522: PetscCall(PetscFree(jdxm));
6523: PetscFunctionReturn(PETSC_SUCCESS);
6524: }
6526: /*@C
6527: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6528: of a set of rows of a matrix; using local numbering of rows.
6530: Collective
6532: Input Parameters:
6533: + mat - the matrix
6534: . numRows - the number of rows to remove
6535: . rows - the local row indices
6536: . diag - value put in all diagonals of eliminated rows
6537: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6538: - b - optional vector of right-hand side, that will be adjusted by provided solution
6540: Level: intermediate
6542: Notes:
6543: Before calling `MatZeroRowsLocal()`, the user must first set the
6544: local-to-global mapping by calling MatSetLocalToGlobalMapping(), this is often already set for matrices obtained with `DMCreateMatrix()`.
6546: See `MatZeroRows()` for details on how this routine operates.
6548: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6549: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6550: @*/
6551: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6552: {
6553: PetscFunctionBegin;
6556: if (numRows) PetscAssertPointer(rows, 3);
6557: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6558: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6559: MatCheckPreallocated(mat, 1);
6561: if (mat->ops->zerorowslocal) {
6562: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6563: } else {
6564: IS is, newis;
6565: const PetscInt *newRows;
6567: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6568: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6569: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6570: PetscCall(ISGetIndices(newis, &newRows));
6571: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6572: PetscCall(ISRestoreIndices(newis, &newRows));
6573: PetscCall(ISDestroy(&newis));
6574: PetscCall(ISDestroy(&is));
6575: }
6576: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6577: PetscFunctionReturn(PETSC_SUCCESS);
6578: }
6580: /*@
6581: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6582: of a set of rows of a matrix; using local numbering of rows.
6584: Collective
6586: Input Parameters:
6587: + mat - the matrix
6588: . is - index set of rows to remove
6589: . diag - value put in all diagonals of eliminated rows
6590: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6591: - b - optional vector of right-hand side, that will be adjusted by provided solution
6593: Level: intermediate
6595: Notes:
6596: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6597: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6599: See `MatZeroRows()` for details on how this routine operates.
6601: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6602: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6603: @*/
6604: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6605: {
6606: PetscInt numRows;
6607: const PetscInt *rows;
6609: PetscFunctionBegin;
6613: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6614: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6615: MatCheckPreallocated(mat, 1);
6617: PetscCall(ISGetLocalSize(is, &numRows));
6618: PetscCall(ISGetIndices(is, &rows));
6619: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6620: PetscCall(ISRestoreIndices(is, &rows));
6621: PetscFunctionReturn(PETSC_SUCCESS);
6622: }
6624: /*@
6625: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6626: of a set of rows and columns of a matrix; using local numbering of rows.
6628: Collective
6630: Input Parameters:
6631: + mat - the matrix
6632: . numRows - the number of rows to remove
6633: . rows - the global row indices
6634: . diag - value put in all diagonals of eliminated rows
6635: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6636: - b - optional vector of right-hand side, that will be adjusted by provided solution
6638: Level: intermediate
6640: Notes:
6641: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6642: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6644: See `MatZeroRowsColumns()` for details on how this routine operates.
6646: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6647: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6648: @*/
6649: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6650: {
6651: IS is, newis;
6652: const PetscInt *newRows;
6654: PetscFunctionBegin;
6657: if (numRows) PetscAssertPointer(rows, 3);
6658: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6659: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6660: MatCheckPreallocated(mat, 1);
6662: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6663: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6664: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6665: PetscCall(ISGetIndices(newis, &newRows));
6666: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6667: PetscCall(ISRestoreIndices(newis, &newRows));
6668: PetscCall(ISDestroy(&newis));
6669: PetscCall(ISDestroy(&is));
6670: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6671: PetscFunctionReturn(PETSC_SUCCESS);
6672: }
6674: /*@
6675: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6676: of a set of rows and columns of a matrix; using local numbering of rows.
6678: Collective
6680: Input Parameters:
6681: + mat - the matrix
6682: . is - index set of rows to remove
6683: . diag - value put in all diagonals of eliminated rows
6684: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6685: - b - optional vector of right-hand side, that will be adjusted by provided solution
6687: Level: intermediate
6689: Notes:
6690: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6691: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6693: See `MatZeroRowsColumns()` for details on how this routine operates.
6695: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6696: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6697: @*/
6698: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6699: {
6700: PetscInt numRows;
6701: const PetscInt *rows;
6703: PetscFunctionBegin;
6707: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6708: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6709: MatCheckPreallocated(mat, 1);
6711: PetscCall(ISGetLocalSize(is, &numRows));
6712: PetscCall(ISGetIndices(is, &rows));
6713: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6714: PetscCall(ISRestoreIndices(is, &rows));
6715: PetscFunctionReturn(PETSC_SUCCESS);
6716: }
6718: /*@C
6719: MatGetSize - Returns the numbers of rows and columns in a matrix.
6721: Not Collective
6723: Input Parameter:
6724: . mat - the matrix
6726: Output Parameters:
6727: + m - the number of global rows
6728: - n - the number of global columns
6730: Level: beginner
6732: Note:
6733: Both output parameters can be `NULL` on input.
6735: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6736: @*/
6737: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6738: {
6739: PetscFunctionBegin;
6741: if (m) *m = mat->rmap->N;
6742: if (n) *n = mat->cmap->N;
6743: PetscFunctionReturn(PETSC_SUCCESS);
6744: }
6746: /*@C
6747: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, Returns the number of local rows and local columns
6748: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6750: Not Collective
6752: Input Parameter:
6753: . mat - the matrix
6755: Output Parameters:
6756: + m - the number of local rows, use `NULL` to not obtain this value
6757: - n - the number of local columns, use `NULL` to not obtain this value
6759: Level: beginner
6761: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6762: @*/
6763: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6764: {
6765: PetscFunctionBegin;
6767: if (m) PetscAssertPointer(m, 2);
6768: if (n) PetscAssertPointer(n, 3);
6769: if (m) *m = mat->rmap->n;
6770: if (n) *n = mat->cmap->n;
6771: PetscFunctionReturn(PETSC_SUCCESS);
6772: }
6774: /*@C
6775: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6776: vector one multiplies this matrix by that are owned by this processor.
6778: Not Collective, unless matrix has not been allocated, then collective
6780: Input Parameter:
6781: . mat - the matrix
6783: Output Parameters:
6784: + m - the global index of the first local column, use `NULL` to not obtain this value
6785: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6787: Level: developer
6789: Notes:
6790: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6792: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6793: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6795: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6796: the local values in the matrix.
6798: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6799: Layouts](sec_matlayout) for details on matrix layouts.
6801: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6802: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6803: @*/
6804: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6805: {
6806: PetscFunctionBegin;
6809: if (m) PetscAssertPointer(m, 2);
6810: if (n) PetscAssertPointer(n, 3);
6811: MatCheckPreallocated(mat, 1);
6812: if (m) *m = mat->cmap->rstart;
6813: if (n) *n = mat->cmap->rend;
6814: PetscFunctionReturn(PETSC_SUCCESS);
6815: }
6817: /*@C
6818: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6819: this MPI process.
6821: Not Collective
6823: Input Parameter:
6824: . mat - the matrix
6826: Output Parameters:
6827: + m - the global index of the first local row, use `NULL` to not obtain this value
6828: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6830: Level: beginner
6832: Notes:
6833: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6835: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6836: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6838: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6839: the local values in the matrix.
6841: The high argument is one more than the last element stored locally.
6843: For all matrices it returns the range of matrix rows associated with rows of a vector that
6844: would contain the result of a matrix vector product with this matrix. See [Matrix
6845: Layouts](sec_matlayout) for details on matrix layouts.
6847: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6848: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6849: @*/
6850: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6851: {
6852: PetscFunctionBegin;
6855: if (m) PetscAssertPointer(m, 2);
6856: if (n) PetscAssertPointer(n, 3);
6857: MatCheckPreallocated(mat, 1);
6858: if (m) *m = mat->rmap->rstart;
6859: if (n) *n = mat->rmap->rend;
6860: PetscFunctionReturn(PETSC_SUCCESS);
6861: }
6863: /*@C
6864: MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
6865: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6867: Not Collective, unless matrix has not been allocated
6869: Input Parameter:
6870: . mat - the matrix
6872: Output Parameter:
6873: . ranges - start of each processors portion plus one more than the total length at the end
6875: Level: beginner
6877: Notes:
6878: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6880: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6881: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6883: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6884: the local values in the matrix.
6886: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6887: would contain the result of a matrix vector product with this matrix. See [Matrix
6888: Layouts](sec_matlayout) for details on matrix layouts.
6890: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6891: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6892: `DMDAGetGhostCorners()`, `DM`
6893: @*/
6894: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges)
6895: {
6896: PetscFunctionBegin;
6899: MatCheckPreallocated(mat, 1);
6900: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6901: PetscFunctionReturn(PETSC_SUCCESS);
6902: }
6904: /*@C
6905: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
6906: vector one multiplies this vector by that are owned by each processor.
6908: Not Collective, unless matrix has not been allocated
6910: Input Parameter:
6911: . mat - the matrix
6913: Output Parameter:
6914: . ranges - start of each processors portion plus one more than the total length at the end
6916: Level: beginner
6918: Notes:
6919: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6921: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6922: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6924: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6925: the local values in the matrix.
6927: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6928: Layouts](sec_matlayout) for details on matrix layouts.
6930: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
6931: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
6932: `DMDAGetGhostCorners()`, `DM`
6933: @*/
6934: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges)
6935: {
6936: PetscFunctionBegin;
6939: MatCheckPreallocated(mat, 1);
6940: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6941: PetscFunctionReturn(PETSC_SUCCESS);
6942: }
6944: /*@C
6945: MatGetOwnershipIS - Get row and column ownership of a matrices' values as index sets.
6947: Not Collective
6949: Input Parameter:
6950: . A - matrix
6952: Output Parameters:
6953: + rows - rows in which this process owns elements, , use `NULL` to not obtain this value
6954: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
6956: Level: intermediate
6958: Note:
6959: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
6960: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
6961: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
6962: details on matrix layouts.
6964: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
6965: @*/
6966: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
6967: {
6968: PetscErrorCode (*f)(Mat, IS *, IS *);
6970: PetscFunctionBegin;
6971: MatCheckPreallocated(A, 1);
6972: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
6973: if (f) {
6974: PetscCall((*f)(A, rows, cols));
6975: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6976: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
6977: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
6978: }
6979: PetscFunctionReturn(PETSC_SUCCESS);
6980: }
6982: /*@C
6983: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
6984: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
6985: to complete the factorization.
6987: Collective
6989: Input Parameters:
6990: + fact - the factorized matrix obtained with `MatGetFactor()`
6991: . mat - the matrix
6992: . row - row permutation
6993: . col - column permutation
6994: - info - structure containing
6995: .vb
6996: levels - number of levels of fill.
6997: expected fill - as ratio of original fill.
6998: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6999: missing diagonal entries)
7000: .ve
7002: Level: developer
7004: Notes:
7005: See [Matrix Factorization](sec_matfactor) for additional information.
7007: Most users should employ the `KSP` interface for linear solvers
7008: instead of working directly with matrix algebra routines such as this.
7009: See, e.g., `KSPCreate()`.
7011: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7013: Developer Note:
7014: The Fortran interface is not autogenerated as the
7015: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7017: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
7018: `MatGetOrdering()`, `MatFactorInfo`
7019: @*/
7020: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7021: {
7022: PetscFunctionBegin;
7027: PetscAssertPointer(info, 5);
7028: PetscAssertPointer(fact, 1);
7029: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7030: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7031: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7032: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7033: MatCheckPreallocated(mat, 2);
7035: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7036: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7037: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7038: PetscFunctionReturn(PETSC_SUCCESS);
7039: }
7041: /*@C
7042: MatICCFactorSymbolic - Performs symbolic incomplete
7043: Cholesky factorization for a symmetric matrix. Use
7044: `MatCholeskyFactorNumeric()` to complete the factorization.
7046: Collective
7048: Input Parameters:
7049: + fact - the factorized matrix obtained with `MatGetFactor()`
7050: . mat - the matrix to be factored
7051: . perm - row and column permutation
7052: - info - structure containing
7053: .vb
7054: levels - number of levels of fill.
7055: expected fill - as ratio of original fill.
7056: .ve
7058: Level: developer
7060: Notes:
7061: Most users should employ the `KSP` interface for linear solvers
7062: instead of working directly with matrix algebra routines such as this.
7063: See, e.g., `KSPCreate()`.
7065: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7067: Developer Note:
7068: The Fortran interface is not autogenerated as the
7069: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7071: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7072: @*/
7073: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7074: {
7075: PetscFunctionBegin;
7079: PetscAssertPointer(info, 4);
7080: PetscAssertPointer(fact, 1);
7081: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7082: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7083: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7084: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7085: MatCheckPreallocated(mat, 2);
7087: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7088: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7089: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7090: PetscFunctionReturn(PETSC_SUCCESS);
7091: }
7093: /*@C
7094: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7095: points to an array of valid matrices, they may be reused to store the new
7096: submatrices.
7098: Collective
7100: Input Parameters:
7101: + mat - the matrix
7102: . n - the number of submatrixes to be extracted (on this processor, may be zero)
7103: . irow - index set of rows to extract
7104: . icol - index set of columns to extract
7105: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7107: Output Parameter:
7108: . submat - the array of submatrices
7110: Level: advanced
7112: Notes:
7113: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7114: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7115: to extract a parallel submatrix.
7117: Some matrix types place restrictions on the row and column
7118: indices, such as that they be sorted or that they be equal to each other.
7120: The index sets may not have duplicate entries.
7122: When extracting submatrices from a parallel matrix, each processor can
7123: form a different submatrix by setting the rows and columns of its
7124: individual index sets according to the local submatrix desired.
7126: When finished using the submatrices, the user should destroy
7127: them with `MatDestroySubMatrices()`.
7129: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7130: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7132: This routine creates the matrices in submat; you should NOT create them before
7133: calling it. It also allocates the array of matrix pointers submat.
7135: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7136: request one row/column in a block, they must request all rows/columns that are in
7137: that block. For example, if the block size is 2 you cannot request just row 0 and
7138: column 0.
7140: Fortran Note:
7141: One must pass in as `submat` a `Mat` array of size at least `n`+1.
7143: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7144: @*/
7145: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7146: {
7147: PetscInt i;
7148: PetscBool eq;
7150: PetscFunctionBegin;
7153: if (n) {
7154: PetscAssertPointer(irow, 3);
7156: PetscAssertPointer(icol, 4);
7158: }
7159: PetscAssertPointer(submat, 6);
7160: if (n && scall == MAT_REUSE_MATRIX) {
7161: PetscAssertPointer(*submat, 6);
7163: }
7164: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7165: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7166: MatCheckPreallocated(mat, 1);
7167: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7168: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7169: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7170: for (i = 0; i < n; i++) {
7171: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7172: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7173: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7174: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7175: if (mat->boundtocpu && mat->bindingpropagates) {
7176: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7177: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7178: }
7179: #endif
7180: }
7181: PetscFunctionReturn(PETSC_SUCCESS);
7182: }
7184: /*@C
7185: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7187: Collective
7189: Input Parameters:
7190: + mat - the matrix
7191: . n - the number of submatrixes to be extracted
7192: . irow - index set of rows to extract
7193: . icol - index set of columns to extract
7194: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7196: Output Parameter:
7197: . submat - the array of submatrices
7199: Level: advanced
7201: Note:
7202: This is used by `PCGASM`
7204: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7205: @*/
7206: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7207: {
7208: PetscInt i;
7209: PetscBool eq;
7211: PetscFunctionBegin;
7214: if (n) {
7215: PetscAssertPointer(irow, 3);
7217: PetscAssertPointer(icol, 4);
7219: }
7220: PetscAssertPointer(submat, 6);
7221: if (n && scall == MAT_REUSE_MATRIX) {
7222: PetscAssertPointer(*submat, 6);
7224: }
7225: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7226: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7227: MatCheckPreallocated(mat, 1);
7229: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7230: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7231: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7232: for (i = 0; i < n; i++) {
7233: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7234: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7235: }
7236: PetscFunctionReturn(PETSC_SUCCESS);
7237: }
7239: /*@C
7240: MatDestroyMatrices - Destroys an array of matrices.
7242: Collective
7244: Input Parameters:
7245: + n - the number of local matrices
7246: - mat - the matrices (this is a pointer to the array of matrices)
7248: Level: advanced
7250: Note:
7251: Frees not only the matrices, but also the array that contains the matrices
7253: Fortran Note:
7254: Does not free the `mat` array.
7256: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()` `MatDestroySubMatrices()`
7257: @*/
7258: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7259: {
7260: PetscInt i;
7262: PetscFunctionBegin;
7263: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7264: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7265: PetscAssertPointer(mat, 2);
7267: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7269: /* memory is allocated even if n = 0 */
7270: PetscCall(PetscFree(*mat));
7271: PetscFunctionReturn(PETSC_SUCCESS);
7272: }
7274: /*@C
7275: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7277: Collective
7279: Input Parameters:
7280: + n - the number of local matrices
7281: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7282: sequence of `MatCreateSubMatrices()`)
7284: Level: advanced
7286: Note:
7287: Frees not only the matrices, but also the array that contains the matrices
7289: Fortran Note:
7290: Does not free the `mat` array.
7292: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7293: @*/
7294: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7295: {
7296: Mat mat0;
7298: PetscFunctionBegin;
7299: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7300: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7301: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7302: PetscAssertPointer(mat, 2);
7304: mat0 = (*mat)[0];
7305: if (mat0 && mat0->ops->destroysubmatrices) {
7306: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7307: } else {
7308: PetscCall(MatDestroyMatrices(n, mat));
7309: }
7310: PetscFunctionReturn(PETSC_SUCCESS);
7311: }
7313: /*@C
7314: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7316: Collective
7318: Input Parameter:
7319: . mat - the matrix
7321: Output Parameter:
7322: . matstruct - the sequential matrix with the nonzero structure of `mat`
7324: Level: developer
7326: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7327: @*/
7328: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7329: {
7330: PetscFunctionBegin;
7332: PetscAssertPointer(matstruct, 2);
7335: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7336: MatCheckPreallocated(mat, 1);
7338: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7339: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7340: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7341: PetscFunctionReturn(PETSC_SUCCESS);
7342: }
7344: /*@C
7345: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7347: Collective
7349: Input Parameter:
7350: . mat - the matrix
7352: Level: advanced
7354: Note:
7355: This is not needed, one can just call `MatDestroy()`
7357: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7358: @*/
7359: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7360: {
7361: PetscFunctionBegin;
7362: PetscAssertPointer(mat, 1);
7363: PetscCall(MatDestroy(mat));
7364: PetscFunctionReturn(PETSC_SUCCESS);
7365: }
7367: /*@
7368: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7369: replaces the index sets by larger ones that represent submatrices with
7370: additional overlap.
7372: Collective
7374: Input Parameters:
7375: + mat - the matrix
7376: . n - the number of index sets
7377: . is - the array of index sets (these index sets will changed during the call)
7378: - ov - the additional overlap requested
7380: Options Database Key:
7381: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7383: Level: developer
7385: Note:
7386: The computed overlap preserves the matrix block sizes when the blocks are square.
7387: That is: if a matrix nonzero for a given block would increase the overlap all columns associated with
7388: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7390: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7391: @*/
7392: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7393: {
7394: PetscInt i, bs, cbs;
7396: PetscFunctionBegin;
7400: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7401: if (n) {
7402: PetscAssertPointer(is, 3);
7404: }
7405: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7406: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7407: MatCheckPreallocated(mat, 1);
7409: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7410: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7411: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7412: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7413: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7414: if (bs == cbs) {
7415: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7416: }
7417: PetscFunctionReturn(PETSC_SUCCESS);
7418: }
7420: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7422: /*@
7423: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7424: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7425: additional overlap.
7427: Collective
7429: Input Parameters:
7430: + mat - the matrix
7431: . n - the number of index sets
7432: . is - the array of index sets (these index sets will changed during the call)
7433: - ov - the additional overlap requested
7435: ` Options Database Key:
7436: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7438: Level: developer
7440: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7441: @*/
7442: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7443: {
7444: PetscInt i;
7446: PetscFunctionBegin;
7449: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7450: if (n) {
7451: PetscAssertPointer(is, 3);
7453: }
7454: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7455: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7456: MatCheckPreallocated(mat, 1);
7457: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7458: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7459: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7460: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7461: PetscFunctionReturn(PETSC_SUCCESS);
7462: }
7464: /*@
7465: MatGetBlockSize - Returns the matrix block size.
7467: Not Collective
7469: Input Parameter:
7470: . mat - the matrix
7472: Output Parameter:
7473: . bs - block size
7475: Level: intermediate
7477: Notes:
7478: Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7480: If the block size has not been set yet this routine returns 1.
7482: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7483: @*/
7484: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7485: {
7486: PetscFunctionBegin;
7488: PetscAssertPointer(bs, 2);
7489: *bs = PetscAbs(mat->rmap->bs);
7490: PetscFunctionReturn(PETSC_SUCCESS);
7491: }
7493: /*@
7494: MatGetBlockSizes - Returns the matrix block row and column sizes.
7496: Not Collective
7498: Input Parameter:
7499: . mat - the matrix
7501: Output Parameters:
7502: + rbs - row block size
7503: - cbs - column block size
7505: Level: intermediate
7507: Notes:
7508: Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7509: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7511: If a block size has not been set yet this routine returns 1.
7513: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7514: @*/
7515: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7516: {
7517: PetscFunctionBegin;
7519: if (rbs) PetscAssertPointer(rbs, 2);
7520: if (cbs) PetscAssertPointer(cbs, 3);
7521: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7522: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7523: PetscFunctionReturn(PETSC_SUCCESS);
7524: }
7526: /*@
7527: MatSetBlockSize - Sets the matrix block size.
7529: Logically Collective
7531: Input Parameters:
7532: + mat - the matrix
7533: - bs - block size
7535: Level: intermediate
7537: Notes:
7538: Block row formats are `MATBAIJ` and `MATSBAIJ` formats ALWAYS have square block storage in the matrix.
7539: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7541: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7542: is compatible with the matrix local sizes.
7544: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7545: @*/
7546: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7547: {
7548: PetscFunctionBegin;
7551: PetscCall(MatSetBlockSizes(mat, bs, bs));
7552: PetscFunctionReturn(PETSC_SUCCESS);
7553: }
7555: typedef struct {
7556: PetscInt n;
7557: IS *is;
7558: Mat *mat;
7559: PetscObjectState nonzerostate;
7560: Mat C;
7561: } EnvelopeData;
7563: static PetscErrorCode EnvelopeDataDestroy(void *ptr)
7564: {
7565: EnvelopeData *edata = (EnvelopeData *)ptr;
7567: PetscFunctionBegin;
7568: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7569: PetscCall(PetscFree(edata->is));
7570: PetscCall(PetscFree(edata));
7571: PetscFunctionReturn(PETSC_SUCCESS);
7572: }
7574: /*@
7575: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7576: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7578: Collective
7580: Input Parameter:
7581: . mat - the matrix
7583: Level: intermediate
7585: Notes:
7586: There can be zeros within the blocks
7588: The blocks can overlap between processes, including laying on more than two processes
7590: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7591: @*/
7592: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7593: {
7594: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7595: PetscInt *diag, *odiag, sc;
7596: VecScatter scatter;
7597: PetscScalar *seqv;
7598: const PetscScalar *parv;
7599: const PetscInt *ia, *ja;
7600: PetscBool set, flag, done;
7601: Mat AA = mat, A;
7602: MPI_Comm comm;
7603: PetscMPIInt rank, size, tag;
7604: MPI_Status status;
7605: PetscContainer container;
7606: EnvelopeData *edata;
7607: Vec seq, par;
7608: IS isglobal;
7610: PetscFunctionBegin;
7612: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7613: if (!set || !flag) {
7614: /* TODO: only needs nonzero structure of transpose */
7615: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7616: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7617: }
7618: PetscCall(MatAIJGetLocalMat(AA, &A));
7619: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7620: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7622: PetscCall(MatGetLocalSize(mat, &n, NULL));
7623: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7624: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7625: PetscCallMPI(MPI_Comm_size(comm, &size));
7626: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7628: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7630: if (rank > 0) {
7631: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7632: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7633: }
7634: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7635: for (i = 0; i < n; i++) {
7636: env = PetscMax(env, ja[ia[i + 1] - 1]);
7637: II = rstart + i;
7638: if (env == II) {
7639: starts[lblocks] = tbs;
7640: sizes[lblocks++] = 1 + II - tbs;
7641: tbs = 1 + II;
7642: }
7643: }
7644: if (rank < size - 1) {
7645: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7646: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7647: }
7649: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7650: if (!set || !flag) PetscCall(MatDestroy(&AA));
7651: PetscCall(MatDestroy(&A));
7653: PetscCall(PetscNew(&edata));
7654: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7655: edata->n = lblocks;
7656: /* create IS needed for extracting blocks from the original matrix */
7657: PetscCall(PetscMalloc1(lblocks, &edata->is));
7658: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7660: /* Create the resulting inverse matrix structure with preallocation information */
7661: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7662: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7663: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7664: PetscCall(MatSetType(edata->C, MATAIJ));
7666: /* Communicate the start and end of each row, from each block to the correct rank */
7667: /* TODO: Use PetscSF instead of VecScatter */
7668: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7669: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7670: PetscCall(VecGetArrayWrite(seq, &seqv));
7671: for (PetscInt i = 0; i < lblocks; i++) {
7672: for (PetscInt j = 0; j < sizes[i]; j++) {
7673: seqv[cnt] = starts[i];
7674: seqv[cnt + 1] = starts[i] + sizes[i];
7675: cnt += 2;
7676: }
7677: }
7678: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7679: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7680: sc -= cnt;
7681: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7682: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7683: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7684: PetscCall(ISDestroy(&isglobal));
7685: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7686: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7687: PetscCall(VecScatterDestroy(&scatter));
7688: PetscCall(VecDestroy(&seq));
7689: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7690: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7691: PetscCall(VecGetArrayRead(par, &parv));
7692: cnt = 0;
7693: PetscCall(MatGetSize(mat, NULL, &n));
7694: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7695: PetscInt start, end, d = 0, od = 0;
7697: start = (PetscInt)PetscRealPart(parv[cnt]);
7698: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7699: cnt += 2;
7701: if (start < cstart) {
7702: od += cstart - start + n - cend;
7703: d += cend - cstart;
7704: } else if (start < cend) {
7705: od += n - cend;
7706: d += cend - start;
7707: } else od += n - start;
7708: if (end <= cstart) {
7709: od -= cstart - end + n - cend;
7710: d -= cend - cstart;
7711: } else if (end < cend) {
7712: od -= n - cend;
7713: d -= cend - end;
7714: } else od -= n - end;
7716: odiag[i] = od;
7717: diag[i] = d;
7718: }
7719: PetscCall(VecRestoreArrayRead(par, &parv));
7720: PetscCall(VecDestroy(&par));
7721: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7722: PetscCall(PetscFree2(diag, odiag));
7723: PetscCall(PetscFree2(sizes, starts));
7725: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7726: PetscCall(PetscContainerSetPointer(container, edata));
7727: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
7728: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7729: PetscCall(PetscObjectDereference((PetscObject)container));
7730: PetscFunctionReturn(PETSC_SUCCESS);
7731: }
7733: /*@
7734: MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7736: Collective
7738: Input Parameters:
7739: + A - the matrix
7740: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7742: Output Parameter:
7743: . C - matrix with inverted block diagonal of `A`
7745: Level: advanced
7747: Note:
7748: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7750: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7751: @*/
7752: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7753: {
7754: PetscContainer container;
7755: EnvelopeData *edata;
7756: PetscObjectState nonzerostate;
7758: PetscFunctionBegin;
7759: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7760: if (!container) {
7761: PetscCall(MatComputeVariableBlockEnvelope(A));
7762: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7763: }
7764: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7765: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7766: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7767: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7769: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7770: *C = edata->C;
7772: for (PetscInt i = 0; i < edata->n; i++) {
7773: Mat D;
7774: PetscScalar *dvalues;
7776: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7777: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7778: PetscCall(MatSeqDenseInvert(D));
7779: PetscCall(MatDenseGetArray(D, &dvalues));
7780: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7781: PetscCall(MatDestroy(&D));
7782: }
7783: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7784: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7785: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7786: PetscFunctionReturn(PETSC_SUCCESS);
7787: }
7789: /*@
7790: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7792: Not Collective
7794: Input Parameters:
7795: + mat - the matrix
7796: . nblocks - the number of blocks on this process, each block can only exist on a single process
7797: - bsizes - the block sizes
7799: Level: intermediate
7801: Notes:
7802: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7804: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
7806: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7807: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7808: @*/
7809: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7810: {
7811: PetscInt ncnt = 0, nlocal;
7813: PetscFunctionBegin;
7815: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7816: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7817: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7818: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7819: PetscCall(PetscFree(mat->bsizes));
7820: mat->nblocks = nblocks;
7821: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7822: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7823: PetscFunctionReturn(PETSC_SUCCESS);
7824: }
7826: /*@C
7827: MatGetVariableBlockSizes - Gets a diagonal blocks of the matrix that need not be of the same size
7829: Not Collective; No Fortran Support
7831: Input Parameter:
7832: . mat - the matrix
7834: Output Parameters:
7835: + nblocks - the number of blocks on this process
7836: - bsizes - the block sizes
7838: Level: intermediate
7840: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7841: @*/
7842: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7843: {
7844: PetscFunctionBegin;
7846: if (nblocks) *nblocks = mat->nblocks;
7847: if (bsizes) *bsizes = mat->bsizes;
7848: PetscFunctionReturn(PETSC_SUCCESS);
7849: }
7851: /*@
7852: MatSetBlockSizes - Sets the matrix block row and column sizes.
7854: Logically Collective
7856: Input Parameters:
7857: + mat - the matrix
7858: . rbs - row block size
7859: - cbs - column block size
7861: Level: intermediate
7863: Notes:
7864: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7865: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7866: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7868: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7869: are compatible with the matrix local sizes.
7871: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7873: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7874: @*/
7875: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7876: {
7877: PetscFunctionBegin;
7881: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7882: if (mat->rmap->refcnt) {
7883: ISLocalToGlobalMapping l2g = NULL;
7884: PetscLayout nmap = NULL;
7886: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7887: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7888: PetscCall(PetscLayoutDestroy(&mat->rmap));
7889: mat->rmap = nmap;
7890: mat->rmap->mapping = l2g;
7891: }
7892: if (mat->cmap->refcnt) {
7893: ISLocalToGlobalMapping l2g = NULL;
7894: PetscLayout nmap = NULL;
7896: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7897: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7898: PetscCall(PetscLayoutDestroy(&mat->cmap));
7899: mat->cmap = nmap;
7900: mat->cmap->mapping = l2g;
7901: }
7902: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7903: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7904: PetscFunctionReturn(PETSC_SUCCESS);
7905: }
7907: /*@
7908: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7910: Logically Collective
7912: Input Parameters:
7913: + mat - the matrix
7914: . fromRow - matrix from which to copy row block size
7915: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7917: Level: developer
7919: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7920: @*/
7921: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7922: {
7923: PetscFunctionBegin;
7927: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7928: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7929: PetscFunctionReturn(PETSC_SUCCESS);
7930: }
7932: /*@
7933: MatResidual - Default routine to calculate the residual r = b - Ax
7935: Collective
7937: Input Parameters:
7938: + mat - the matrix
7939: . b - the right-hand-side
7940: - x - the approximate solution
7942: Output Parameter:
7943: . r - location to store the residual
7945: Level: developer
7947: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
7948: @*/
7949: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
7950: {
7951: PetscFunctionBegin;
7957: MatCheckPreallocated(mat, 1);
7958: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
7959: if (!mat->ops->residual) {
7960: PetscCall(MatMult(mat, x, r));
7961: PetscCall(VecAYPX(r, -1.0, b));
7962: } else {
7963: PetscUseTypeMethod(mat, residual, b, x, r);
7964: }
7965: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
7966: PetscFunctionReturn(PETSC_SUCCESS);
7967: }
7969: /*MC
7970: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
7972: Synopsis:
7973: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7975: Not Collective
7977: Input Parameters:
7978: + A - the matrix
7979: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7980: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7981: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7982: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7983: always used.
7985: Output Parameters:
7986: + n - number of local rows in the (possibly compressed) matrix
7987: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7988: . ja - the column indices
7989: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7990: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7992: Level: developer
7994: Note:
7995: Use `MatRestoreRowIJF90()` when you no longer need access to the data
7997: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
7998: M*/
8000: /*MC
8001: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
8003: Synopsis:
8004: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8006: Not Collective
8008: Input Parameters:
8009: + A - the matrix
8010: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8011: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8012: inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8013: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8014: always used.
8015: . n - number of local rows in the (possibly compressed) matrix
8016: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8017: . ja - the column indices
8018: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8019: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8021: Level: developer
8023: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
8024: M*/
8026: /*@C
8027: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8029: Collective
8031: Input Parameters:
8032: + mat - the matrix
8033: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8034: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8035: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8036: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8037: always used.
8039: Output Parameters:
8040: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8041: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8042: . ja - the column indices, use `NULL` if not needed
8043: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8044: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8046: Level: developer
8048: Notes:
8049: You CANNOT change any of the ia[] or ja[] values.
8051: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8053: Fortran Notes:
8054: Use
8055: .vb
8056: PetscInt, pointer :: ia(:),ja(:)
8057: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8058: ! Access the ith and jth entries via ia(i) and ja(j)
8059: .ve
8061: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
8063: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8064: @*/
8065: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8066: {
8067: PetscFunctionBegin;
8070: if (n) PetscAssertPointer(n, 5);
8071: if (ia) PetscAssertPointer(ia, 6);
8072: if (ja) PetscAssertPointer(ja, 7);
8073: if (done) PetscAssertPointer(done, 8);
8074: MatCheckPreallocated(mat, 1);
8075: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8076: else {
8077: if (done) *done = PETSC_TRUE;
8078: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8079: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8080: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8081: }
8082: PetscFunctionReturn(PETSC_SUCCESS);
8083: }
8085: /*@C
8086: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8088: Collective
8090: Input Parameters:
8091: + mat - the matrix
8092: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8093: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8094: symmetrized
8095: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8096: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8097: always used.
8098: . n - number of columns in the (possibly compressed) matrix
8099: . ia - the column pointers; that is ia[0] = 0, ia[col] = i[col-1] + number of elements in that col of the matrix
8100: - ja - the row indices
8102: Output Parameter:
8103: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8105: Level: developer
8107: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8108: @*/
8109: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8110: {
8111: PetscFunctionBegin;
8114: PetscAssertPointer(n, 5);
8115: if (ia) PetscAssertPointer(ia, 6);
8116: if (ja) PetscAssertPointer(ja, 7);
8117: PetscAssertPointer(done, 8);
8118: MatCheckPreallocated(mat, 1);
8119: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8120: else {
8121: *done = PETSC_TRUE;
8122: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8123: }
8124: PetscFunctionReturn(PETSC_SUCCESS);
8125: }
8127: /*@C
8128: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8130: Collective
8132: Input Parameters:
8133: + mat - the matrix
8134: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8135: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8136: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8137: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8138: always used.
8139: . n - size of (possibly compressed) matrix
8140: . ia - the row pointers
8141: - ja - the column indices
8143: Output Parameter:
8144: . done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned
8146: Level: developer
8148: Note:
8149: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8150: us of the array after it has been restored. If you pass `NULL`, it will
8151: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8153: Fortran Note:
8154: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8156: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8157: @*/
8158: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8159: {
8160: PetscFunctionBegin;
8163: if (ia) PetscAssertPointer(ia, 6);
8164: if (ja) PetscAssertPointer(ja, 7);
8165: if (done) PetscAssertPointer(done, 8);
8166: MatCheckPreallocated(mat, 1);
8168: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8169: else {
8170: if (done) *done = PETSC_TRUE;
8171: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8172: if (n) *n = 0;
8173: if (ia) *ia = NULL;
8174: if (ja) *ja = NULL;
8175: }
8176: PetscFunctionReturn(PETSC_SUCCESS);
8177: }
8179: /*@C
8180: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8182: Collective
8184: Input Parameters:
8185: + mat - the matrix
8186: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8187: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8188: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8189: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8190: always used.
8192: Output Parameters:
8193: + n - size of (possibly compressed) matrix
8194: . ia - the column pointers
8195: . ja - the row indices
8196: - done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned
8198: Level: developer
8200: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8201: @*/
8202: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8203: {
8204: PetscFunctionBegin;
8207: if (ia) PetscAssertPointer(ia, 6);
8208: if (ja) PetscAssertPointer(ja, 7);
8209: PetscAssertPointer(done, 8);
8210: MatCheckPreallocated(mat, 1);
8212: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8213: else {
8214: *done = PETSC_TRUE;
8215: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8216: if (n) *n = 0;
8217: if (ia) *ia = NULL;
8218: if (ja) *ja = NULL;
8219: }
8220: PetscFunctionReturn(PETSC_SUCCESS);
8221: }
8223: /*@C
8224: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8225: `MatGetColumnIJ()`.
8227: Collective
8229: Input Parameters:
8230: + mat - the matrix
8231: . ncolors - maximum color value
8232: . n - number of entries in colorarray
8233: - colorarray - array indicating color for each column
8235: Output Parameter:
8236: . iscoloring - coloring generated using colorarray information
8238: Level: developer
8240: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8241: @*/
8242: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8243: {
8244: PetscFunctionBegin;
8247: PetscAssertPointer(colorarray, 4);
8248: PetscAssertPointer(iscoloring, 5);
8249: MatCheckPreallocated(mat, 1);
8251: if (!mat->ops->coloringpatch) {
8252: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8253: } else {
8254: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8255: }
8256: PetscFunctionReturn(PETSC_SUCCESS);
8257: }
8259: /*@
8260: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8262: Logically Collective
8264: Input Parameter:
8265: . mat - the factored matrix to be reset
8267: Level: developer
8269: Notes:
8270: This routine should be used only with factored matrices formed by in-place
8271: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8272: format). This option can save memory, for example, when solving nonlinear
8273: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8274: ILU(0) preconditioner.
8276: One can specify in-place ILU(0) factorization by calling
8277: .vb
8278: PCType(pc,PCILU);
8279: PCFactorSeUseInPlace(pc);
8280: .ve
8281: or by using the options -pc_type ilu -pc_factor_in_place
8283: In-place factorization ILU(0) can also be used as a local
8284: solver for the blocks within the block Jacobi or additive Schwarz
8285: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8286: for details on setting local solver options.
8288: Most users should employ the `KSP` interface for linear solvers
8289: instead of working directly with matrix algebra routines such as this.
8290: See, e.g., `KSPCreate()`.
8292: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8293: @*/
8294: PetscErrorCode MatSetUnfactored(Mat mat)
8295: {
8296: PetscFunctionBegin;
8299: MatCheckPreallocated(mat, 1);
8300: mat->factortype = MAT_FACTOR_NONE;
8301: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8302: PetscUseTypeMethod(mat, setunfactored);
8303: PetscFunctionReturn(PETSC_SUCCESS);
8304: }
8306: /*MC
8307: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8309: Synopsis:
8310: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8312: Not Collective
8314: Input Parameter:
8315: . x - matrix
8317: Output Parameters:
8318: + xx_v - the Fortran pointer to the array
8319: - ierr - error code
8321: Example of Usage:
8322: .vb
8323: PetscScalar, pointer xx_v(:,:)
8324: ....
8325: call MatDenseGetArrayF90(x,xx_v,ierr)
8326: a = xx_v(3)
8327: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8328: .ve
8330: Level: advanced
8332: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8333: M*/
8335: /*MC
8336: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8337: accessed with `MatDenseGetArrayF90()`.
8339: Synopsis:
8340: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8342: Not Collective
8344: Input Parameters:
8345: + x - matrix
8346: - xx_v - the Fortran90 pointer to the array
8348: Output Parameter:
8349: . ierr - error code
8351: Example of Usage:
8352: .vb
8353: PetscScalar, pointer xx_v(:,:)
8354: ....
8355: call MatDenseGetArrayF90(x,xx_v,ierr)
8356: a = xx_v(3)
8357: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8358: .ve
8360: Level: advanced
8362: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8363: M*/
8365: /*MC
8366: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8368: Synopsis:
8369: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8371: Not Collective
8373: Input Parameter:
8374: . x - matrix
8376: Output Parameters:
8377: + xx_v - the Fortran pointer to the array
8378: - ierr - error code
8380: Example of Usage:
8381: .vb
8382: PetscScalar, pointer xx_v(:)
8383: ....
8384: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8385: a = xx_v(3)
8386: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8387: .ve
8389: Level: advanced
8391: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8392: M*/
8394: /*MC
8395: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8396: accessed with `MatSeqAIJGetArrayF90()`.
8398: Synopsis:
8399: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8401: Not Collective
8403: Input Parameters:
8404: + x - matrix
8405: - xx_v - the Fortran90 pointer to the array
8407: Output Parameter:
8408: . ierr - error code
8410: Example of Usage:
8411: .vb
8412: PetscScalar, pointer xx_v(:)
8413: ....
8414: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8415: a = xx_v(3)
8416: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8417: .ve
8419: Level: advanced
8421: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8422: M*/
8424: /*@
8425: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8426: as the original matrix.
8428: Collective
8430: Input Parameters:
8431: + mat - the original matrix
8432: . isrow - parallel `IS` containing the rows this processor should obtain
8433: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in IT's "diagonal part" in the new matrix.
8434: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8436: Output Parameter:
8437: . newmat - the new submatrix, of the same type as the original matrix
8439: Level: advanced
8441: Notes:
8442: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8444: Some matrix types place restrictions on the row and column indices, such
8445: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8446: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8448: The index sets may not have duplicate entries.
8450: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8451: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8452: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8453: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8454: you are finished using it.
8456: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8457: the input matrix.
8459: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8461: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8462: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8464: Example usage:
8465: Consider the following 8x8 matrix with 34 non-zero values, that is
8466: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8467: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8468: as follows
8469: .vb
8470: 1 2 0 | 0 3 0 | 0 4
8471: Proc0 0 5 6 | 7 0 0 | 8 0
8472: 9 0 10 | 11 0 0 | 12 0
8473: -------------------------------------
8474: 13 0 14 | 15 16 17 | 0 0
8475: Proc1 0 18 0 | 19 20 21 | 0 0
8476: 0 0 0 | 22 23 0 | 24 0
8477: -------------------------------------
8478: Proc2 25 26 27 | 0 0 28 | 29 0
8479: 30 0 0 | 31 32 33 | 0 34
8480: .ve
8482: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8484: .vb
8485: 2 0 | 0 3 0 | 0
8486: Proc0 5 6 | 7 0 0 | 8
8487: -------------------------------
8488: Proc1 18 0 | 19 20 21 | 0
8489: -------------------------------
8490: Proc2 26 27 | 0 0 28 | 29
8491: 0 0 | 31 32 33 | 0
8492: .ve
8494: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8495: @*/
8496: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8497: {
8498: PetscMPIInt size;
8499: Mat *local;
8500: IS iscoltmp;
8501: PetscBool flg;
8503: PetscFunctionBegin;
8507: PetscAssertPointer(newmat, 5);
8510: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8511: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8513: MatCheckPreallocated(mat, 1);
8514: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8516: if (!iscol || isrow == iscol) {
8517: PetscBool stride;
8518: PetscMPIInt grabentirematrix = 0, grab;
8519: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8520: if (stride) {
8521: PetscInt first, step, n, rstart, rend;
8522: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8523: if (step == 1) {
8524: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8525: if (rstart == first) {
8526: PetscCall(ISGetLocalSize(isrow, &n));
8527: if (n == rend - rstart) grabentirematrix = 1;
8528: }
8529: }
8530: }
8531: PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8532: if (grab) {
8533: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8534: if (cll == MAT_INITIAL_MATRIX) {
8535: *newmat = mat;
8536: PetscCall(PetscObjectReference((PetscObject)mat));
8537: }
8538: PetscFunctionReturn(PETSC_SUCCESS);
8539: }
8540: }
8542: if (!iscol) {
8543: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8544: } else {
8545: iscoltmp = iscol;
8546: }
8548: /* if original matrix is on just one processor then use submatrix generated */
8549: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8550: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8551: goto setproperties;
8552: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8553: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8554: *newmat = *local;
8555: PetscCall(PetscFree(local));
8556: goto setproperties;
8557: } else if (!mat->ops->createsubmatrix) {
8558: /* Create a new matrix type that implements the operation using the full matrix */
8559: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8560: switch (cll) {
8561: case MAT_INITIAL_MATRIX:
8562: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8563: break;
8564: case MAT_REUSE_MATRIX:
8565: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8566: break;
8567: default:
8568: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8569: }
8570: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8571: goto setproperties;
8572: }
8574: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8575: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8576: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8578: setproperties:
8579: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8580: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8581: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8582: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8583: PetscFunctionReturn(PETSC_SUCCESS);
8584: }
8586: /*@
8587: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8589: Not Collective
8591: Input Parameters:
8592: + A - the matrix we wish to propagate options from
8593: - B - the matrix we wish to propagate options to
8595: Level: beginner
8597: Note:
8598: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8600: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8601: @*/
8602: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8603: {
8604: PetscFunctionBegin;
8607: B->symmetry_eternal = A->symmetry_eternal;
8608: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8609: B->symmetric = A->symmetric;
8610: B->structurally_symmetric = A->structurally_symmetric;
8611: B->spd = A->spd;
8612: B->hermitian = A->hermitian;
8613: PetscFunctionReturn(PETSC_SUCCESS);
8614: }
8616: /*@
8617: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8618: used during the assembly process to store values that belong to
8619: other processors.
8621: Not Collective
8623: Input Parameters:
8624: + mat - the matrix
8625: . size - the initial size of the stash.
8626: - bsize - the initial size of the block-stash(if used).
8628: Options Database Keys:
8629: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8630: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8632: Level: intermediate
8634: Notes:
8635: The block-stash is used for values set with `MatSetValuesBlocked()` while
8636: the stash is used for values set with `MatSetValues()`
8638: Run with the option -info and look for output of the form
8639: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8640: to determine the appropriate value, MM, to use for size and
8641: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8642: to determine the value, BMM to use for bsize
8644: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8645: @*/
8646: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8647: {
8648: PetscFunctionBegin;
8651: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8652: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8653: PetscFunctionReturn(PETSC_SUCCESS);
8654: }
8656: /*@
8657: MatInterpolateAdd - $w = y + A*x$ or $A^T*x$ depending on the shape of
8658: the matrix
8660: Neighbor-wise Collective
8662: Input Parameters:
8663: + A - the matrix
8664: . x - the vector to be multiplied by the interpolation operator
8665: - y - the vector to be added to the result
8667: Output Parameter:
8668: . w - the resulting vector
8670: Level: intermediate
8672: Notes:
8673: `w` may be the same vector as `y`.
8675: This allows one to use either the restriction or interpolation (its transpose)
8676: matrix to do the interpolation
8678: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8679: @*/
8680: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8681: {
8682: PetscInt M, N, Ny;
8684: PetscFunctionBegin;
8689: PetscCall(MatGetSize(A, &M, &N));
8690: PetscCall(VecGetSize(y, &Ny));
8691: if (M == Ny) {
8692: PetscCall(MatMultAdd(A, x, y, w));
8693: } else {
8694: PetscCall(MatMultTransposeAdd(A, x, y, w));
8695: }
8696: PetscFunctionReturn(PETSC_SUCCESS);
8697: }
8699: /*@
8700: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8701: the matrix
8703: Neighbor-wise Collective
8705: Input Parameters:
8706: + A - the matrix
8707: - x - the vector to be interpolated
8709: Output Parameter:
8710: . y - the resulting vector
8712: Level: intermediate
8714: Note:
8715: This allows one to use either the restriction or interpolation (its transpose)
8716: matrix to do the interpolation
8718: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8719: @*/
8720: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8721: {
8722: PetscInt M, N, Ny;
8724: PetscFunctionBegin;
8728: PetscCall(MatGetSize(A, &M, &N));
8729: PetscCall(VecGetSize(y, &Ny));
8730: if (M == Ny) {
8731: PetscCall(MatMult(A, x, y));
8732: } else {
8733: PetscCall(MatMultTranspose(A, x, y));
8734: }
8735: PetscFunctionReturn(PETSC_SUCCESS);
8736: }
8738: /*@
8739: MatRestrict - $y = A*x$ or $A^T*x$
8741: Neighbor-wise Collective
8743: Input Parameters:
8744: + A - the matrix
8745: - x - the vector to be restricted
8747: Output Parameter:
8748: . y - the resulting vector
8750: Level: intermediate
8752: Note:
8753: This allows one to use either the restriction or interpolation (its transpose)
8754: matrix to do the restriction
8756: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8757: @*/
8758: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8759: {
8760: PetscInt M, N, Nx;
8762: PetscFunctionBegin;
8766: PetscCall(MatGetSize(A, &M, &N));
8767: PetscCall(VecGetSize(x, &Nx));
8768: if (M == Nx) {
8769: PetscCall(MatMultTranspose(A, x, y));
8770: } else {
8771: PetscCall(MatMult(A, x, y));
8772: }
8773: PetscFunctionReturn(PETSC_SUCCESS);
8774: }
8776: /*@
8777: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8779: Neighbor-wise Collective
8781: Input Parameters:
8782: + A - the matrix
8783: . x - the input dense matrix to be multiplied
8784: - w - the input dense matrix to be added to the result
8786: Output Parameter:
8787: . y - the output dense matrix
8789: Level: intermediate
8791: Note:
8792: This allows one to use either the restriction or interpolation (its transpose)
8793: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8794: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8796: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8797: @*/
8798: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8799: {
8800: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8801: PetscBool trans = PETSC_TRUE;
8802: MatReuse reuse = MAT_INITIAL_MATRIX;
8804: PetscFunctionBegin;
8810: PetscCall(MatGetSize(A, &M, &N));
8811: PetscCall(MatGetSize(x, &Mx, &Nx));
8812: if (N == Mx) trans = PETSC_FALSE;
8813: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8814: Mo = trans ? N : M;
8815: if (*y) {
8816: PetscCall(MatGetSize(*y, &My, &Ny));
8817: if (Mo == My && Nx == Ny) {
8818: reuse = MAT_REUSE_MATRIX;
8819: } else {
8820: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8821: PetscCall(MatDestroy(y));
8822: }
8823: }
8825: if (w && *y == w) { /* this is to minimize changes in PCMG */
8826: PetscBool flg;
8828: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8829: if (w) {
8830: PetscInt My, Ny, Mw, Nw;
8832: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8833: PetscCall(MatGetSize(*y, &My, &Ny));
8834: PetscCall(MatGetSize(w, &Mw, &Nw));
8835: if (!flg || My != Mw || Ny != Nw) w = NULL;
8836: }
8837: if (!w) {
8838: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8839: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8840: PetscCall(PetscObjectDereference((PetscObject)w));
8841: } else {
8842: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8843: }
8844: }
8845: if (!trans) {
8846: PetscCall(MatMatMult(A, x, reuse, PETSC_DEFAULT, y));
8847: } else {
8848: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DEFAULT, y));
8849: }
8850: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8851: PetscFunctionReturn(PETSC_SUCCESS);
8852: }
8854: /*@
8855: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8857: Neighbor-wise Collective
8859: Input Parameters:
8860: + A - the matrix
8861: - x - the input dense matrix
8863: Output Parameter:
8864: . y - the output dense matrix
8866: Level: intermediate
8868: Note:
8869: This allows one to use either the restriction or interpolation (its transpose)
8870: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8871: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8873: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8874: @*/
8875: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8876: {
8877: PetscFunctionBegin;
8878: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8879: PetscFunctionReturn(PETSC_SUCCESS);
8880: }
8882: /*@
8883: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8885: Neighbor-wise Collective
8887: Input Parameters:
8888: + A - the matrix
8889: - x - the input dense matrix
8891: Output Parameter:
8892: . y - the output dense matrix
8894: Level: intermediate
8896: Note:
8897: This allows one to use either the restriction or interpolation (its transpose)
8898: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8899: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8901: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8902: @*/
8903: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8904: {
8905: PetscFunctionBegin;
8906: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8907: PetscFunctionReturn(PETSC_SUCCESS);
8908: }
8910: /*@
8911: MatGetNullSpace - retrieves the null space of a matrix.
8913: Logically Collective
8915: Input Parameters:
8916: + mat - the matrix
8917: - nullsp - the null space object
8919: Level: developer
8921: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8922: @*/
8923: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8924: {
8925: PetscFunctionBegin;
8927: PetscAssertPointer(nullsp, 2);
8928: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8929: PetscFunctionReturn(PETSC_SUCCESS);
8930: }
8932: /*@C
8933: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8935: Logically Collective
8937: Input Parameters:
8938: + n - the number of matrices
8939: - mat - the array of matrices
8941: Output Parameters:
8942: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space
8944: Level: developer
8946: Note:
8947: Call `MatRestoreNullspaces()` to provide these to another array of matrices
8949: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8950: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8951: @*/
8952: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8953: {
8954: PetscFunctionBegin;
8955: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8956: PetscAssertPointer(mat, 2);
8957: PetscAssertPointer(nullsp, 3);
8959: PetscCall(PetscCalloc1(3 * n, nullsp));
8960: for (PetscInt i = 0; i < n; i++) {
8962: (*nullsp)[i] = mat[i]->nullsp;
8963: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8964: (*nullsp)[n + i] = mat[i]->nearnullsp;
8965: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8966: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8967: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8968: }
8969: PetscFunctionReturn(PETSC_SUCCESS);
8970: }
8972: /*@C
8973: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
8975: Logically Collective
8977: Input Parameters:
8978: + n - the number of matrices
8979: . mat - the array of matrices
8980: - nullsp - an array of null spaces, `NULL` if the null space does not exist
8982: Level: developer
8984: Note:
8985: Call `MatGetNullSpaces()` to create `nullsp`
8987: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8988: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
8989: @*/
8990: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8991: {
8992: PetscFunctionBegin;
8993: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8994: PetscAssertPointer(mat, 2);
8995: PetscAssertPointer(nullsp, 3);
8996: PetscAssertPointer(*nullsp, 3);
8998: for (PetscInt i = 0; i < n; i++) {
9000: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9001: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9002: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9003: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9004: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9005: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9006: }
9007: PetscCall(PetscFree(*nullsp));
9008: PetscFunctionReturn(PETSC_SUCCESS);
9009: }
9011: /*@
9012: MatSetNullSpace - attaches a null space to a matrix.
9014: Logically Collective
9016: Input Parameters:
9017: + mat - the matrix
9018: - nullsp - the null space object
9020: Level: advanced
9022: Notes:
9023: This null space is used by the `KSP` linear solvers to solve singular systems.
9025: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with an nullsp of `NULL`
9027: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge to
9028: to zero but the linear system will still be solved in a least squares sense.
9030: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9031: the domain of a matrix A (from $R^n$ to $R^m$ (m rows, n columns) $R^n$ = the direct sum of the null space of A, n(A), + the range of $A^T$, $R(A^T)$.
9032: Similarly $R^m$ = direct sum n($A^T$) + R(A). Hence the linear system $A x = b$ has a solution only if b in R(A) (or correspondingly b is orthogonal to
9033: n($A^T$)) and if x is a solution then x + alpha n(A) is a solution for any alpha. The minimum norm solution is orthogonal to n(A). For problems without a solution
9034: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b} where \hat{b} is b orthogonalized to the n($A^T$).
9035: This \hat{b} can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9037: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one as called
9038: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
9039: routine also automatically calls `MatSetTransposeNullSpace()`.
9041: The user should call `MatNullSpaceDestroy()`.
9043: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9044: `KSPSetPCSide()`
9045: @*/
9046: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9047: {
9048: PetscFunctionBegin;
9051: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9052: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9053: mat->nullsp = nullsp;
9054: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9055: PetscFunctionReturn(PETSC_SUCCESS);
9056: }
9058: /*@
9059: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9061: Logically Collective
9063: Input Parameters:
9064: + mat - the matrix
9065: - nullsp - the null space object
9067: Level: developer
9069: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9070: @*/
9071: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9072: {
9073: PetscFunctionBegin;
9076: PetscAssertPointer(nullsp, 2);
9077: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9078: PetscFunctionReturn(PETSC_SUCCESS);
9079: }
9081: /*@
9082: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9084: Logically Collective
9086: Input Parameters:
9087: + mat - the matrix
9088: - nullsp - the null space object
9090: Level: advanced
9092: Notes:
9093: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9095: See `MatSetNullSpace()`
9097: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9098: @*/
9099: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9100: {
9101: PetscFunctionBegin;
9104: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9105: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9106: mat->transnullsp = nullsp;
9107: PetscFunctionReturn(PETSC_SUCCESS);
9108: }
9110: /*@
9111: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
9112: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9114: Logically Collective
9116: Input Parameters:
9117: + mat - the matrix
9118: - nullsp - the null space object
9120: Level: advanced
9122: Notes:
9123: Overwrites any previous near null space that may have been attached
9125: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9127: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9128: @*/
9129: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9130: {
9131: PetscFunctionBegin;
9135: MatCheckPreallocated(mat, 1);
9136: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9137: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9138: mat->nearnullsp = nullsp;
9139: PetscFunctionReturn(PETSC_SUCCESS);
9140: }
9142: /*@
9143: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9145: Not Collective
9147: Input Parameter:
9148: . mat - the matrix
9150: Output Parameter:
9151: . nullsp - the null space object, `NULL` if not set
9153: Level: advanced
9155: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9156: @*/
9157: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9158: {
9159: PetscFunctionBegin;
9162: PetscAssertPointer(nullsp, 2);
9163: MatCheckPreallocated(mat, 1);
9164: *nullsp = mat->nearnullsp;
9165: PetscFunctionReturn(PETSC_SUCCESS);
9166: }
9168: /*@C
9169: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9171: Collective
9173: Input Parameters:
9174: + mat - the matrix
9175: . row - row/column permutation
9176: - info - information on desired factorization process
9178: Level: developer
9180: Notes:
9181: Probably really in-place only when level of fill is zero, otherwise allocates
9182: new space to store factored matrix and deletes previous memory.
9184: Most users should employ the `KSP` interface for linear solvers
9185: instead of working directly with matrix algebra routines such as this.
9186: See, e.g., `KSPCreate()`.
9188: Developer Note:
9189: The Fortran interface is not autogenerated as the
9190: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9192: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9193: @*/
9194: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9195: {
9196: PetscFunctionBegin;
9200: PetscAssertPointer(info, 3);
9201: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9202: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9203: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9204: MatCheckPreallocated(mat, 1);
9205: PetscUseTypeMethod(mat, iccfactor, row, info);
9206: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9207: PetscFunctionReturn(PETSC_SUCCESS);
9208: }
9210: /*@
9211: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9212: ghosted ones.
9214: Not Collective
9216: Input Parameters:
9217: + mat - the matrix
9218: - diag - the diagonal values, including ghost ones
9220: Level: developer
9222: Notes:
9223: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9225: This allows one to avoid during communication to perform the scaling that must be done with `MatDiagonalScale()`
9227: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9228: @*/
9229: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9230: {
9231: PetscMPIInt size;
9233: PetscFunctionBegin;
9238: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9239: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9240: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9241: if (size == 1) {
9242: PetscInt n, m;
9243: PetscCall(VecGetSize(diag, &n));
9244: PetscCall(MatGetSize(mat, NULL, &m));
9245: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9246: PetscCall(MatDiagonalScale(mat, NULL, diag));
9247: } else {
9248: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9249: }
9250: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9251: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9252: PetscFunctionReturn(PETSC_SUCCESS);
9253: }
9255: /*@
9256: MatGetInertia - Gets the inertia from a factored matrix
9258: Collective
9260: Input Parameter:
9261: . mat - the matrix
9263: Output Parameters:
9264: + nneg - number of negative eigenvalues
9265: . nzero - number of zero eigenvalues
9266: - npos - number of positive eigenvalues
9268: Level: advanced
9270: Note:
9271: Matrix must have been factored by `MatCholeskyFactor()`
9273: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9274: @*/
9275: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9276: {
9277: PetscFunctionBegin;
9280: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9281: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9282: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9283: PetscFunctionReturn(PETSC_SUCCESS);
9284: }
9286: /*@C
9287: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9289: Neighbor-wise Collective
9291: Input Parameters:
9292: + mat - the factored matrix obtained with `MatGetFactor()`
9293: - b - the right-hand-side vectors
9295: Output Parameter:
9296: . x - the result vectors
9298: Level: developer
9300: Note:
9301: The vectors `b` and `x` cannot be the same. I.e., one cannot
9302: call `MatSolves`(A,x,x).
9304: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9305: @*/
9306: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9307: {
9308: PetscFunctionBegin;
9311: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9312: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9313: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9315: MatCheckPreallocated(mat, 1);
9316: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9317: PetscUseTypeMethod(mat, solves, b, x);
9318: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9319: PetscFunctionReturn(PETSC_SUCCESS);
9320: }
9322: /*@
9323: MatIsSymmetric - Test whether a matrix is symmetric
9325: Collective
9327: Input Parameters:
9328: + A - the matrix to test
9329: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9331: Output Parameter:
9332: . flg - the result
9334: Level: intermediate
9336: Notes:
9337: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9339: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9341: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9342: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9344: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9345: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9346: @*/
9347: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9348: {
9349: PetscFunctionBegin;
9351: PetscAssertPointer(flg, 3);
9352: if (A->symmetric != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->symmetric);
9353: else {
9354: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9355: else PetscCall(MatIsTranspose(A, A, tol, flg));
9356: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9357: }
9358: PetscFunctionReturn(PETSC_SUCCESS);
9359: }
9361: /*@
9362: MatIsHermitian - Test whether a matrix is Hermitian
9364: Collective
9366: Input Parameters:
9367: + A - the matrix to test
9368: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9370: Output Parameter:
9371: . flg - the result
9373: Level: intermediate
9375: Notes:
9376: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9378: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9380: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9381: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMEMTRY_ETERNAL`,`PETSC_TRUE`)
9383: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9384: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9385: @*/
9386: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9387: {
9388: PetscFunctionBegin;
9390: PetscAssertPointer(flg, 3);
9391: if (A->hermitian != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->hermitian);
9392: else {
9393: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9394: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9395: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9396: }
9397: PetscFunctionReturn(PETSC_SUCCESS);
9398: }
9400: /*@
9401: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9403: Not Collective
9405: Input Parameter:
9406: . A - the matrix to check
9408: Output Parameters:
9409: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9410: - flg - the result (only valid if set is `PETSC_TRUE`)
9412: Level: advanced
9414: Notes:
9415: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9416: if you want it explicitly checked
9418: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9419: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9421: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9422: @*/
9423: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9424: {
9425: PetscFunctionBegin;
9427: PetscAssertPointer(set, 2);
9428: PetscAssertPointer(flg, 3);
9429: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9430: *set = PETSC_TRUE;
9431: *flg = PetscBool3ToBool(A->symmetric);
9432: } else {
9433: *set = PETSC_FALSE;
9434: }
9435: PetscFunctionReturn(PETSC_SUCCESS);
9436: }
9438: /*@
9439: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9441: Not Collective
9443: Input Parameter:
9444: . A - the matrix to check
9446: Output Parameters:
9447: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9448: - flg - the result (only valid if set is `PETSC_TRUE`)
9450: Level: advanced
9452: Notes:
9453: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9455: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9456: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9458: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9459: @*/
9460: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9461: {
9462: PetscFunctionBegin;
9464: PetscAssertPointer(set, 2);
9465: PetscAssertPointer(flg, 3);
9466: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9467: *set = PETSC_TRUE;
9468: *flg = PetscBool3ToBool(A->spd);
9469: } else {
9470: *set = PETSC_FALSE;
9471: }
9472: PetscFunctionReturn(PETSC_SUCCESS);
9473: }
9475: /*@
9476: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9478: Not Collective
9480: Input Parameter:
9481: . A - the matrix to check
9483: Output Parameters:
9484: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9485: - flg - the result (only valid if set is `PETSC_TRUE`)
9487: Level: advanced
9489: Notes:
9490: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9491: if you want it explicitly checked
9493: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9494: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9496: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9497: @*/
9498: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9499: {
9500: PetscFunctionBegin;
9502: PetscAssertPointer(set, 2);
9503: PetscAssertPointer(flg, 3);
9504: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9505: *set = PETSC_TRUE;
9506: *flg = PetscBool3ToBool(A->hermitian);
9507: } else {
9508: *set = PETSC_FALSE;
9509: }
9510: PetscFunctionReturn(PETSC_SUCCESS);
9511: }
9513: /*@
9514: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9516: Collective
9518: Input Parameter:
9519: . A - the matrix to test
9521: Output Parameter:
9522: . flg - the result
9524: Level: intermediate
9526: Notes:
9527: If the matrix does yet know it is structurally symmetric this can be an expensive operation, also available `MatIsStructurallySymmetricKnown()`
9529: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9530: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9532: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9533: @*/
9534: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9535: {
9536: PetscFunctionBegin;
9538: PetscAssertPointer(flg, 2);
9539: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9540: *flg = PetscBool3ToBool(A->structurally_symmetric);
9541: } else {
9542: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9543: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9544: }
9545: PetscFunctionReturn(PETSC_SUCCESS);
9546: }
9548: /*@
9549: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9551: Not Collective
9553: Input Parameter:
9554: . A - the matrix to check
9556: Output Parameters:
9557: + set - PETSC_TRUE if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9558: - flg - the result (only valid if set is PETSC_TRUE)
9560: Level: advanced
9562: Notes:
9563: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9564: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9566: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9568: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9569: @*/
9570: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9571: {
9572: PetscFunctionBegin;
9574: PetscAssertPointer(set, 2);
9575: PetscAssertPointer(flg, 3);
9576: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9577: *set = PETSC_TRUE;
9578: *flg = PetscBool3ToBool(A->structurally_symmetric);
9579: } else {
9580: *set = PETSC_FALSE;
9581: }
9582: PetscFunctionReturn(PETSC_SUCCESS);
9583: }
9585: /*@
9586: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9587: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9589: Not Collective
9591: Input Parameter:
9592: . mat - the matrix
9594: Output Parameters:
9595: + nstash - the size of the stash
9596: . reallocs - the number of additional mallocs incurred.
9597: . bnstash - the size of the block stash
9598: - breallocs - the number of additional mallocs incurred.in the block stash
9600: Level: advanced
9602: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9603: @*/
9604: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9605: {
9606: PetscFunctionBegin;
9607: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9608: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9609: PetscFunctionReturn(PETSC_SUCCESS);
9610: }
9612: /*@C
9613: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9614: parallel layout, `PetscLayout` for rows and columns
9616: Collective
9618: Input Parameter:
9619: . mat - the matrix
9621: Output Parameters:
9622: + right - (optional) vector that the matrix can be multiplied against
9623: - left - (optional) vector that the matrix vector product can be stored in
9625: Level: advanced
9627: Notes:
9628: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9630: These are new vectors which are not owned by the mat, they should be destroyed in `VecDestroy()` when no longer needed
9632: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9633: @*/
9634: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9635: {
9636: PetscFunctionBegin;
9639: if (mat->ops->getvecs) {
9640: PetscUseTypeMethod(mat, getvecs, right, left);
9641: } else {
9642: if (right) {
9643: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9644: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9645: PetscCall(VecSetType(*right, mat->defaultvectype));
9646: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9647: if (mat->boundtocpu && mat->bindingpropagates) {
9648: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9649: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9650: }
9651: #endif
9652: }
9653: if (left) {
9654: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9655: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9656: PetscCall(VecSetType(*left, mat->defaultvectype));
9657: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9658: if (mat->boundtocpu && mat->bindingpropagates) {
9659: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9660: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9661: }
9662: #endif
9663: }
9664: }
9665: PetscFunctionReturn(PETSC_SUCCESS);
9666: }
9668: /*@C
9669: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9670: with default values.
9672: Not Collective
9674: Input Parameter:
9675: . info - the `MatFactorInfo` data structure
9677: Level: developer
9679: Notes:
9680: The solvers are generally used through the `KSP` and `PC` objects, for example
9681: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9683: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9685: Developer Note:
9686: The Fortran interface is not autogenerated as the
9687: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9689: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9690: @*/
9691: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9692: {
9693: PetscFunctionBegin;
9694: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9695: PetscFunctionReturn(PETSC_SUCCESS);
9696: }
9698: /*@
9699: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9701: Collective
9703: Input Parameters:
9704: + mat - the factored matrix
9705: - is - the index set defining the Schur indices (0-based)
9707: Level: advanced
9709: Notes:
9710: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9712: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9714: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9716: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9717: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9718: @*/
9719: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9720: {
9721: PetscErrorCode (*f)(Mat, IS);
9723: PetscFunctionBegin;
9728: PetscCheckSameComm(mat, 1, is, 2);
9729: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9730: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9731: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9732: PetscCall(MatDestroy(&mat->schur));
9733: PetscCall((*f)(mat, is));
9734: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9735: PetscFunctionReturn(PETSC_SUCCESS);
9736: }
9738: /*@
9739: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9741: Logically Collective
9743: Input Parameters:
9744: + F - the factored matrix obtained by calling `MatGetFactor()`
9745: . S - location where to return the Schur complement, can be `NULL`
9746: - status - the status of the Schur complement matrix, can be `NULL`
9748: Level: advanced
9750: Notes:
9751: You must call `MatFactorSetSchurIS()` before calling this routine.
9753: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9755: The routine provides a copy of the Schur matrix stored within the solver data structures.
9756: The caller must destroy the object when it is no longer needed.
9757: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9759: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9761: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9763: Developer Note:
9764: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9765: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9767: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9768: @*/
9769: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9770: {
9771: PetscFunctionBegin;
9773: if (S) PetscAssertPointer(S, 2);
9774: if (status) PetscAssertPointer(status, 3);
9775: if (S) {
9776: PetscErrorCode (*f)(Mat, Mat *);
9778: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9779: if (f) {
9780: PetscCall((*f)(F, S));
9781: } else {
9782: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9783: }
9784: }
9785: if (status) *status = F->schur_status;
9786: PetscFunctionReturn(PETSC_SUCCESS);
9787: }
9789: /*@
9790: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9792: Logically Collective
9794: Input Parameters:
9795: + F - the factored matrix obtained by calling `MatGetFactor()`
9796: . S - location where to return the Schur complement, can be `NULL`
9797: - status - the status of the Schur complement matrix, can be `NULL`
9799: Level: advanced
9801: Notes:
9802: You must call `MatFactorSetSchurIS()` before calling this routine.
9804: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9806: The routine returns a the Schur Complement stored within the data structures of the solver.
9808: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9810: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9812: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9814: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9816: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9817: @*/
9818: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9819: {
9820: PetscFunctionBegin;
9822: if (S) {
9823: PetscAssertPointer(S, 2);
9824: *S = F->schur;
9825: }
9826: if (status) {
9827: PetscAssertPointer(status, 3);
9828: *status = F->schur_status;
9829: }
9830: PetscFunctionReturn(PETSC_SUCCESS);
9831: }
9833: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9834: {
9835: Mat S = F->schur;
9837: PetscFunctionBegin;
9838: switch (F->schur_status) {
9839: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9840: case MAT_FACTOR_SCHUR_INVERTED:
9841: if (S) {
9842: S->ops->solve = NULL;
9843: S->ops->matsolve = NULL;
9844: S->ops->solvetranspose = NULL;
9845: S->ops->matsolvetranspose = NULL;
9846: S->ops->solveadd = NULL;
9847: S->ops->solvetransposeadd = NULL;
9848: S->factortype = MAT_FACTOR_NONE;
9849: PetscCall(PetscFree(S->solvertype));
9850: }
9851: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9852: break;
9853: default:
9854: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9855: }
9856: PetscFunctionReturn(PETSC_SUCCESS);
9857: }
9859: /*@
9860: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9862: Logically Collective
9864: Input Parameters:
9865: + F - the factored matrix obtained by calling `MatGetFactor()`
9866: . S - location where the Schur complement is stored
9867: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9869: Level: advanced
9871: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9872: @*/
9873: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9874: {
9875: PetscFunctionBegin;
9877: if (S) {
9879: *S = NULL;
9880: }
9881: F->schur_status = status;
9882: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9883: PetscFunctionReturn(PETSC_SUCCESS);
9884: }
9886: /*@
9887: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9889: Logically Collective
9891: Input Parameters:
9892: + F - the factored matrix obtained by calling `MatGetFactor()`
9893: . rhs - location where the right-hand side of the Schur complement system is stored
9894: - sol - location where the solution of the Schur complement system has to be returned
9896: Level: advanced
9898: Notes:
9899: The sizes of the vectors should match the size of the Schur complement
9901: Must be called after `MatFactorSetSchurIS()`
9903: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9904: @*/
9905: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9906: {
9907: PetscFunctionBegin;
9914: PetscCheckSameComm(F, 1, rhs, 2);
9915: PetscCheckSameComm(F, 1, sol, 3);
9916: PetscCall(MatFactorFactorizeSchurComplement(F));
9917: switch (F->schur_status) {
9918: case MAT_FACTOR_SCHUR_FACTORED:
9919: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9920: break;
9921: case MAT_FACTOR_SCHUR_INVERTED:
9922: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9923: break;
9924: default:
9925: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9926: }
9927: PetscFunctionReturn(PETSC_SUCCESS);
9928: }
9930: /*@
9931: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9933: Logically Collective
9935: Input Parameters:
9936: + F - the factored matrix obtained by calling `MatGetFactor()`
9937: . rhs - location where the right-hand side of the Schur complement system is stored
9938: - sol - location where the solution of the Schur complement system has to be returned
9940: Level: advanced
9942: Notes:
9943: The sizes of the vectors should match the size of the Schur complement
9945: Must be called after `MatFactorSetSchurIS()`
9947: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9948: @*/
9949: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9950: {
9951: PetscFunctionBegin;
9958: PetscCheckSameComm(F, 1, rhs, 2);
9959: PetscCheckSameComm(F, 1, sol, 3);
9960: PetscCall(MatFactorFactorizeSchurComplement(F));
9961: switch (F->schur_status) {
9962: case MAT_FACTOR_SCHUR_FACTORED:
9963: PetscCall(MatSolve(F->schur, rhs, sol));
9964: break;
9965: case MAT_FACTOR_SCHUR_INVERTED:
9966: PetscCall(MatMult(F->schur, rhs, sol));
9967: break;
9968: default:
9969: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9970: }
9971: PetscFunctionReturn(PETSC_SUCCESS);
9972: }
9974: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9975: #if PetscDefined(HAVE_CUDA)
9976: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9977: #endif
9979: /* Schur status updated in the interface */
9980: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9981: {
9982: Mat S = F->schur;
9984: PetscFunctionBegin;
9985: if (S) {
9986: PetscMPIInt size;
9987: PetscBool isdense, isdensecuda;
9989: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9990: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9991: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9992: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9993: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9994: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9995: if (isdense) {
9996: PetscCall(MatSeqDenseInvertFactors_Private(S));
9997: } else if (isdensecuda) {
9998: #if defined(PETSC_HAVE_CUDA)
9999: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
10000: #endif
10001: }
10002: // HIP??????????????
10003: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10004: }
10005: PetscFunctionReturn(PETSC_SUCCESS);
10006: }
10008: /*@
10009: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10011: Logically Collective
10013: Input Parameter:
10014: . F - the factored matrix obtained by calling `MatGetFactor()`
10016: Level: advanced
10018: Notes:
10019: Must be called after `MatFactorSetSchurIS()`.
10021: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
10023: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10024: @*/
10025: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10026: {
10027: PetscFunctionBegin;
10030: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10031: PetscCall(MatFactorFactorizeSchurComplement(F));
10032: PetscCall(MatFactorInvertSchurComplement_Private(F));
10033: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10034: PetscFunctionReturn(PETSC_SUCCESS);
10035: }
10037: /*@
10038: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10040: Logically Collective
10042: Input Parameter:
10043: . F - the factored matrix obtained by calling `MatGetFactor()`
10045: Level: advanced
10047: Note:
10048: Must be called after `MatFactorSetSchurIS()`
10050: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10051: @*/
10052: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10053: {
10054: MatFactorInfo info;
10056: PetscFunctionBegin;
10059: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10060: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10061: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10062: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10063: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10064: } else {
10065: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10066: }
10067: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10068: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10069: PetscFunctionReturn(PETSC_SUCCESS);
10070: }
10072: /*@
10073: MatPtAP - Creates the matrix product $C = P^T * A * P$
10075: Neighbor-wise Collective
10077: Input Parameters:
10078: + A - the matrix
10079: . P - the projection matrix
10080: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10081: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DEFAULT` if you do not have a good estimate
10082: if the result is a dense matrix this is irrelevant
10084: Output Parameter:
10085: . C - the product matrix
10087: Level: intermediate
10089: Notes:
10090: C will be created and must be destroyed by the user with `MatDestroy()`.
10092: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10094: Developer Note:
10095: For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.
10097: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10098: @*/
10099: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10100: {
10101: PetscFunctionBegin;
10102: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10103: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10105: if (scall == MAT_INITIAL_MATRIX) {
10106: PetscCall(MatProductCreate(A, P, NULL, C));
10107: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10108: PetscCall(MatProductSetAlgorithm(*C, "default"));
10109: PetscCall(MatProductSetFill(*C, fill));
10111: (*C)->product->api_user = PETSC_TRUE;
10112: PetscCall(MatProductSetFromOptions(*C));
10113: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10114: PetscCall(MatProductSymbolic(*C));
10115: } else { /* scall == MAT_REUSE_MATRIX */
10116: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10117: }
10119: PetscCall(MatProductNumeric(*C));
10120: (*C)->symmetric = A->symmetric;
10121: (*C)->spd = A->spd;
10122: PetscFunctionReturn(PETSC_SUCCESS);
10123: }
10125: /*@
10126: MatRARt - Creates the matrix product $C = R * A * R^T$
10128: Neighbor-wise Collective
10130: Input Parameters:
10131: + A - the matrix
10132: . R - the projection matrix
10133: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10134: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DEFAULT` if you do not have a good estimate
10135: if the result is a dense matrix this is irrelevant
10137: Output Parameter:
10138: . C - the product matrix
10140: Level: intermediate
10142: Notes:
10143: C will be created and must be destroyed by the user with `MatDestroy()`.
10145: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10147: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10148: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10149: parallel MatRARt is implemented via explicit transpose of R, which could be very expensive.
10150: We recommend using MatPtAP().
10152: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10153: @*/
10154: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10155: {
10156: PetscFunctionBegin;
10157: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10158: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10160: if (scall == MAT_INITIAL_MATRIX) {
10161: PetscCall(MatProductCreate(A, R, NULL, C));
10162: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10163: PetscCall(MatProductSetAlgorithm(*C, "default"));
10164: PetscCall(MatProductSetFill(*C, fill));
10166: (*C)->product->api_user = PETSC_TRUE;
10167: PetscCall(MatProductSetFromOptions(*C));
10168: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10169: PetscCall(MatProductSymbolic(*C));
10170: } else { /* scall == MAT_REUSE_MATRIX */
10171: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10172: }
10174: PetscCall(MatProductNumeric(*C));
10175: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10176: PetscFunctionReturn(PETSC_SUCCESS);
10177: }
10179: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10180: {
10181: PetscBool flg = PETSC_TRUE;
10183: PetscFunctionBegin;
10184: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10185: if (scall == MAT_INITIAL_MATRIX) {
10186: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10187: PetscCall(MatProductCreate(A, B, NULL, C));
10188: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10189: PetscCall(MatProductSetFill(*C, fill));
10190: } else { /* scall == MAT_REUSE_MATRIX */
10191: Mat_Product *product = (*C)->product;
10193: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10194: if (flg && product && product->type != ptype) {
10195: PetscCall(MatProductClear(*C));
10196: product = NULL;
10197: }
10198: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10199: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10200: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10201: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10202: product = (*C)->product;
10203: product->fill = fill;
10204: product->clear = PETSC_TRUE;
10205: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10206: flg = PETSC_FALSE;
10207: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10208: }
10209: }
10210: if (flg) {
10211: (*C)->product->api_user = PETSC_TRUE;
10212: PetscCall(MatProductSetType(*C, ptype));
10213: PetscCall(MatProductSetFromOptions(*C));
10214: PetscCall(MatProductSymbolic(*C));
10215: }
10216: PetscCall(MatProductNumeric(*C));
10217: PetscFunctionReturn(PETSC_SUCCESS);
10218: }
10220: /*@
10221: MatMatMult - Performs matrix-matrix multiplication C=A*B.
10223: Neighbor-wise Collective
10225: Input Parameters:
10226: + A - the left matrix
10227: . B - the right matrix
10228: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10229: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if you do not have a good estimate
10230: if the result is a dense matrix this is irrelevant
10232: Output Parameter:
10233: . C - the product matrix
10235: Notes:
10236: Unless scall is `MAT_REUSE_MATRIX` C will be created.
10238: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10239: call to this function with `MAT_INITIAL_MATRIX`.
10241: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value actually needed.
10243: In the special case where matrix B (and hence C) are dense you can create the correctly sized matrix C yourself and then call this routine with `MAT_REUSE_MATRIX`,
10244: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix C is sparse.
10246: Example of Usage:
10247: .vb
10248: MatProductCreate(A,B,NULL,&C);
10249: MatProductSetType(C,MATPRODUCT_AB);
10250: MatProductSymbolic(C);
10251: MatProductNumeric(C); // compute C=A * B
10252: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10253: MatProductNumeric(C);
10254: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10255: MatProductNumeric(C);
10256: .ve
10258: Level: intermediate
10260: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10261: @*/
10262: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10263: {
10264: PetscFunctionBegin;
10265: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10266: PetscFunctionReturn(PETSC_SUCCESS);
10267: }
10269: /*@
10270: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10272: Neighbor-wise Collective
10274: Input Parameters:
10275: + A - the left matrix
10276: . B - the right matrix
10277: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10278: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10280: Output Parameter:
10281: . C - the product matrix
10283: Options Database Key:
10284: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10285: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10286: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10288: Level: intermediate
10290: Notes:
10291: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10293: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10295: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10296: actually needed.
10298: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10299: and for pairs of `MATMPIDENSE` matrices.
10301: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10303: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10304: @*/
10305: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10306: {
10307: PetscFunctionBegin;
10308: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10309: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10310: PetscFunctionReturn(PETSC_SUCCESS);
10311: }
10313: /*@
10314: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10316: Neighbor-wise Collective
10318: Input Parameters:
10319: + A - the left matrix
10320: . B - the right matrix
10321: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10322: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10324: Output Parameter:
10325: . C - the product matrix
10327: Level: intermediate
10329: Notes:
10330: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10332: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10334: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10336: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10337: actually needed.
10339: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10340: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10342: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10343: @*/
10344: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10345: {
10346: PetscFunctionBegin;
10347: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10348: PetscFunctionReturn(PETSC_SUCCESS);
10349: }
10351: /*@
10352: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10354: Neighbor-wise Collective
10356: Input Parameters:
10357: + A - the left matrix
10358: . B - the middle matrix
10359: . C - the right matrix
10360: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10361: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DEFAULT` if you do not have a good estimate
10362: if the result is a dense matrix this is irrelevant
10364: Output Parameter:
10365: . D - the product matrix
10367: Level: intermediate
10369: Notes:
10370: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10372: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10374: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10376: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10377: actually needed.
10379: If you have many matrices with the same non-zero structure to multiply, you
10380: should use `MAT_REUSE_MATRIX` in all calls but the first
10382: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10383: @*/
10384: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10385: {
10386: PetscFunctionBegin;
10387: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10388: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10390: if (scall == MAT_INITIAL_MATRIX) {
10391: PetscCall(MatProductCreate(A, B, C, D));
10392: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10393: PetscCall(MatProductSetAlgorithm(*D, "default"));
10394: PetscCall(MatProductSetFill(*D, fill));
10396: (*D)->product->api_user = PETSC_TRUE;
10397: PetscCall(MatProductSetFromOptions(*D));
10398: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10399: ((PetscObject)C)->type_name);
10400: PetscCall(MatProductSymbolic(*D));
10401: } else { /* user may change input matrices when REUSE */
10402: PetscCall(MatProductReplaceMats(A, B, C, *D));
10403: }
10404: PetscCall(MatProductNumeric(*D));
10405: PetscFunctionReturn(PETSC_SUCCESS);
10406: }
10408: /*@
10409: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10411: Collective
10413: Input Parameters:
10414: + mat - the matrix
10415: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10416: . subcomm - MPI communicator split from the communicator where mat resides in (or `MPI_COMM_NULL` if nsubcomm is used)
10417: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10419: Output Parameter:
10420: . matredundant - redundant matrix
10422: Level: advanced
10424: Notes:
10425: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10426: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10428: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10429: calling it.
10431: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
10433: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10434: @*/
10435: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10436: {
10437: MPI_Comm comm;
10438: PetscMPIInt size;
10439: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10440: Mat_Redundant *redund = NULL;
10441: PetscSubcomm psubcomm = NULL;
10442: MPI_Comm subcomm_in = subcomm;
10443: Mat *matseq;
10444: IS isrow, iscol;
10445: PetscBool newsubcomm = PETSC_FALSE;
10447: PetscFunctionBegin;
10449: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10450: PetscAssertPointer(*matredundant, 5);
10452: }
10454: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10455: if (size == 1 || nsubcomm == 1) {
10456: if (reuse == MAT_INITIAL_MATRIX) {
10457: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10458: } else {
10459: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10460: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10461: }
10462: PetscFunctionReturn(PETSC_SUCCESS);
10463: }
10465: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10466: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10467: MatCheckPreallocated(mat, 1);
10469: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10470: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10471: /* create psubcomm, then get subcomm */
10472: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10473: PetscCallMPI(MPI_Comm_size(comm, &size));
10474: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must between 1 and %d", size);
10476: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10477: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10478: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10479: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10480: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10481: newsubcomm = PETSC_TRUE;
10482: PetscCall(PetscSubcommDestroy(&psubcomm));
10483: }
10485: /* get isrow, iscol and a local sequential matrix matseq[0] */
10486: if (reuse == MAT_INITIAL_MATRIX) {
10487: mloc_sub = PETSC_DECIDE;
10488: nloc_sub = PETSC_DECIDE;
10489: if (bs < 1) {
10490: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10491: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10492: } else {
10493: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10494: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10495: }
10496: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10497: rstart = rend - mloc_sub;
10498: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10499: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10500: PetscCall(ISSetIdentity(iscol));
10501: } else { /* reuse == MAT_REUSE_MATRIX */
10502: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10503: /* retrieve subcomm */
10504: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10505: redund = (*matredundant)->redundant;
10506: isrow = redund->isrow;
10507: iscol = redund->iscol;
10508: matseq = redund->matseq;
10509: }
10510: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10512: /* get matredundant over subcomm */
10513: if (reuse == MAT_INITIAL_MATRIX) {
10514: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10516: /* create a supporting struct and attach it to C for reuse */
10517: PetscCall(PetscNew(&redund));
10518: (*matredundant)->redundant = redund;
10519: redund->isrow = isrow;
10520: redund->iscol = iscol;
10521: redund->matseq = matseq;
10522: if (newsubcomm) {
10523: redund->subcomm = subcomm;
10524: } else {
10525: redund->subcomm = MPI_COMM_NULL;
10526: }
10527: } else {
10528: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10529: }
10530: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10531: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10532: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10533: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10534: }
10535: #endif
10536: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10537: PetscFunctionReturn(PETSC_SUCCESS);
10538: }
10540: /*@C
10541: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10542: a given `Mat`. Each submatrix can span multiple procs.
10544: Collective
10546: Input Parameters:
10547: + mat - the matrix
10548: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10549: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10551: Output Parameter:
10552: . subMat - parallel sub-matrices each spanning a given `subcomm`
10554: Level: advanced
10556: Notes:
10557: The submatrix partition across processors is dictated by `subComm` a
10558: communicator obtained by `MPI_comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10559: is not restricted to be grouped with consecutive original MPI processes.
10561: Due the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10562: map directly to the layout of the original matrix [wrt the local
10563: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10564: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10565: the `subMat`. However the offDiagMat looses some columns - and this is
10566: reconstructed with `MatSetValues()`
10568: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10570: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10571: @*/
10572: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10573: {
10574: PetscMPIInt commsize, subCommSize;
10576: PetscFunctionBegin;
10577: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10578: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10579: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommZize %d", commsize, subCommSize);
10581: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10582: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10583: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10584: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10585: PetscFunctionReturn(PETSC_SUCCESS);
10586: }
10588: /*@
10589: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10591: Not Collective
10593: Input Parameters:
10594: + mat - matrix to extract local submatrix from
10595: . isrow - local row indices for submatrix
10596: - iscol - local column indices for submatrix
10598: Output Parameter:
10599: . submat - the submatrix
10601: Level: intermediate
10603: Notes:
10604: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10606: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10607: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communictor of `mat`'s.
10609: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10610: `MatSetValuesBlockedLocal()` will also be implemented.
10612: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10613: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
10615: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10616: @*/
10617: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10618: {
10619: PetscFunctionBegin;
10623: PetscCheckSameComm(isrow, 2, iscol, 3);
10624: PetscAssertPointer(submat, 4);
10625: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10627: if (mat->ops->getlocalsubmatrix) {
10628: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10629: } else {
10630: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10631: }
10632: PetscFunctionReturn(PETSC_SUCCESS);
10633: }
10635: /*@
10636: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10638: Not Collective
10640: Input Parameters:
10641: + mat - matrix to extract local submatrix from
10642: . isrow - local row indices for submatrix
10643: . iscol - local column indices for submatrix
10644: - submat - the submatrix
10646: Level: intermediate
10648: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10649: @*/
10650: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10651: {
10652: PetscFunctionBegin;
10656: PetscCheckSameComm(isrow, 2, iscol, 3);
10657: PetscAssertPointer(submat, 4);
10660: if (mat->ops->restorelocalsubmatrix) {
10661: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10662: } else {
10663: PetscCall(MatDestroy(submat));
10664: }
10665: *submat = NULL;
10666: PetscFunctionReturn(PETSC_SUCCESS);
10667: }
10669: /*@
10670: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10672: Collective
10674: Input Parameter:
10675: . mat - the matrix
10677: Output Parameter:
10678: . is - if any rows have zero diagonals this contains the list of them
10680: Level: developer
10682: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10683: @*/
10684: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10685: {
10686: PetscFunctionBegin;
10689: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10690: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10692: if (!mat->ops->findzerodiagonals) {
10693: Vec diag;
10694: const PetscScalar *a;
10695: PetscInt *rows;
10696: PetscInt rStart, rEnd, r, nrow = 0;
10698: PetscCall(MatCreateVecs(mat, &diag, NULL));
10699: PetscCall(MatGetDiagonal(mat, diag));
10700: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10701: PetscCall(VecGetArrayRead(diag, &a));
10702: for (r = 0; r < rEnd - rStart; ++r)
10703: if (a[r] == 0.0) ++nrow;
10704: PetscCall(PetscMalloc1(nrow, &rows));
10705: nrow = 0;
10706: for (r = 0; r < rEnd - rStart; ++r)
10707: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10708: PetscCall(VecRestoreArrayRead(diag, &a));
10709: PetscCall(VecDestroy(&diag));
10710: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10711: } else {
10712: PetscUseTypeMethod(mat, findzerodiagonals, is);
10713: }
10714: PetscFunctionReturn(PETSC_SUCCESS);
10715: }
10717: /*@
10718: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10720: Collective
10722: Input Parameter:
10723: . mat - the matrix
10725: Output Parameter:
10726: . is - contains the list of rows with off block diagonal entries
10728: Level: developer
10730: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10731: @*/
10732: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10733: {
10734: PetscFunctionBegin;
10737: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10738: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10740: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10741: PetscFunctionReturn(PETSC_SUCCESS);
10742: }
10744: /*@C
10745: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10747: Collective; No Fortran Support
10749: Input Parameter:
10750: . mat - the matrix
10752: Output Parameter:
10753: . values - the block inverses in column major order (FORTRAN-like)
10755: Level: advanced
10757: Notes:
10758: The size of the blocks is determined by the block size of the matrix.
10760: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10762: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
10764: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10765: @*/
10766: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar **values)
10767: {
10768: PetscFunctionBegin;
10770: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10771: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10772: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10773: PetscFunctionReturn(PETSC_SUCCESS);
10774: }
10776: /*@C
10777: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10779: Collective; No Fortran Support
10781: Input Parameters:
10782: + mat - the matrix
10783: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10784: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10786: Output Parameter:
10787: . values - the block inverses in column major order (FORTRAN-like)
10789: Level: advanced
10791: Notes:
10792: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10794: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10796: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10797: @*/
10798: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt *bsizes, PetscScalar *values)
10799: {
10800: PetscFunctionBegin;
10802: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10803: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10804: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10805: PetscFunctionReturn(PETSC_SUCCESS);
10806: }
10808: /*@
10809: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10811: Collective
10813: Input Parameters:
10814: + A - the matrix
10815: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10817: Level: advanced
10819: Note:
10820: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10822: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10823: @*/
10824: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10825: {
10826: const PetscScalar *vals;
10827: PetscInt *dnnz;
10828: PetscInt m, rstart, rend, bs, i, j;
10830: PetscFunctionBegin;
10831: PetscCall(MatInvertBlockDiagonal(A, &vals));
10832: PetscCall(MatGetBlockSize(A, &bs));
10833: PetscCall(MatGetLocalSize(A, &m, NULL));
10834: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10835: PetscCall(PetscMalloc1(m / bs, &dnnz));
10836: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10837: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10838: PetscCall(PetscFree(dnnz));
10839: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10840: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10841: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10842: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10843: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10844: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10845: PetscFunctionReturn(PETSC_SUCCESS);
10846: }
10848: /*@C
10849: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10850: via `MatTransposeColoringCreate()`.
10852: Collective
10854: Input Parameter:
10855: . c - coloring context
10857: Level: intermediate
10859: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10860: @*/
10861: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10862: {
10863: MatTransposeColoring matcolor = *c;
10865: PetscFunctionBegin;
10866: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10867: if (--((PetscObject)matcolor)->refct > 0) {
10868: matcolor = NULL;
10869: PetscFunctionReturn(PETSC_SUCCESS);
10870: }
10872: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10873: PetscCall(PetscFree(matcolor->rows));
10874: PetscCall(PetscFree(matcolor->den2sp));
10875: PetscCall(PetscFree(matcolor->colorforcol));
10876: PetscCall(PetscFree(matcolor->columns));
10877: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10878: PetscCall(PetscHeaderDestroy(c));
10879: PetscFunctionReturn(PETSC_SUCCESS);
10880: }
10882: /*@C
10883: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10884: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10885: `MatTransposeColoring` to sparse `B`.
10887: Collective
10889: Input Parameters:
10890: + coloring - coloring context created with `MatTransposeColoringCreate()`
10891: - B - sparse matrix
10893: Output Parameter:
10894: . Btdense - dense matrix $B^T$
10896: Level: developer
10898: Note:
10899: These are used internally for some implementations of `MatRARt()`
10901: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10902: @*/
10903: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10904: {
10905: PetscFunctionBegin;
10910: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10911: PetscFunctionReturn(PETSC_SUCCESS);
10912: }
10914: /*@C
10915: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10916: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10917: in which `B^T_{dens}` is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix
10918: $C_{sp}$ from $C_{den}$.
10920: Collective
10922: Input Parameters:
10923: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10924: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10926: Output Parameter:
10927: . Csp - sparse matrix
10929: Level: developer
10931: Note:
10932: These are used internally for some implementations of `MatRARt()`
10934: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10935: @*/
10936: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10937: {
10938: PetscFunctionBegin;
10943: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10944: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10945: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10946: PetscFunctionReturn(PETSC_SUCCESS);
10947: }
10949: /*@C
10950: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10952: Collective
10954: Input Parameters:
10955: + mat - the matrix product C
10956: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10958: Output Parameter:
10959: . color - the new coloring context
10961: Level: intermediate
10963: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10964: `MatTransColoringApplyDenToSp()`
10965: @*/
10966: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10967: {
10968: MatTransposeColoring c;
10969: MPI_Comm comm;
10971: PetscFunctionBegin;
10972: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10973: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10974: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10976: c->ctype = iscoloring->ctype;
10977: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10979: *color = c;
10980: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10981: PetscFunctionReturn(PETSC_SUCCESS);
10982: }
10984: /*@
10985: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10986: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
10988: Not Collective
10990: Input Parameter:
10991: . mat - the matrix
10993: Output Parameter:
10994: . state - the current state
10996: Level: intermediate
10998: Notes:
10999: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11000: different matrices
11002: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11004: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
11006: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11007: @*/
11008: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11009: {
11010: PetscFunctionBegin;
11012: *state = mat->nonzerostate;
11013: PetscFunctionReturn(PETSC_SUCCESS);
11014: }
11016: /*@
11017: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11018: matrices from each processor
11020: Collective
11022: Input Parameters:
11023: + comm - the communicators the parallel matrix will live on
11024: . seqmat - the input sequential matrices
11025: . n - number of local columns (or `PETSC_DECIDE`)
11026: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11028: Output Parameter:
11029: . mpimat - the parallel matrix generated
11031: Level: developer
11033: Note:
11034: The number of columns of the matrix in EACH processor MUST be the same.
11036: .seealso: [](ch_matrices), `Mat`
11037: @*/
11038: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11039: {
11040: PetscMPIInt size;
11042: PetscFunctionBegin;
11043: PetscCallMPI(MPI_Comm_size(comm, &size));
11044: if (size == 1) {
11045: if (reuse == MAT_INITIAL_MATRIX) {
11046: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11047: } else {
11048: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11049: }
11050: PetscFunctionReturn(PETSC_SUCCESS);
11051: }
11053: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11055: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11056: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11057: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11058: PetscFunctionReturn(PETSC_SUCCESS);
11059: }
11061: /*@
11062: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11064: Collective
11066: Input Parameters:
11067: + A - the matrix to create subdomains from
11068: - N - requested number of subdomains
11070: Output Parameters:
11071: + n - number of subdomains resulting on this MPI process
11072: - iss - `IS` list with indices of subdomains on this MPI process
11074: Level: advanced
11076: Note:
11077: The number of subdomains must be smaller than the communicator size
11079: .seealso: [](ch_matrices), `Mat`, `IS`
11080: @*/
11081: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11082: {
11083: MPI_Comm comm, subcomm;
11084: PetscMPIInt size, rank, color;
11085: PetscInt rstart, rend, k;
11087: PetscFunctionBegin;
11088: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11089: PetscCallMPI(MPI_Comm_size(comm, &size));
11090: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11091: PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11092: *n = 1;
11093: k = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
11094: color = rank / k;
11095: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11096: PetscCall(PetscMalloc1(1, iss));
11097: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11098: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11099: PetscCallMPI(MPI_Comm_free(&subcomm));
11100: PetscFunctionReturn(PETSC_SUCCESS);
11101: }
11103: /*@
11104: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11106: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11107: If they are not the same, uses `MatMatMatMult()`.
11109: Once the coarse grid problem is constructed, correct for interpolation operators
11110: that are not of full rank, which can legitimately happen in the case of non-nested
11111: geometric multigrid.
11113: Input Parameters:
11114: + restrct - restriction operator
11115: . dA - fine grid matrix
11116: . interpolate - interpolation operator
11117: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11118: - fill - expected fill, use `PETSC_DEFAULT` if you do not have a good estimate
11120: Output Parameter:
11121: . A - the Galerkin coarse matrix
11123: Options Database Key:
11124: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11126: Level: developer
11128: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11129: @*/
11130: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11131: {
11132: IS zerorows;
11133: Vec diag;
11135: PetscFunctionBegin;
11136: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11137: /* Construct the coarse grid matrix */
11138: if (interpolate == restrct) {
11139: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11140: } else {
11141: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11142: }
11144: /* If the interpolation matrix is not of full rank, A will have zero rows.
11145: This can legitimately happen in the case of non-nested geometric multigrid.
11146: In that event, we set the rows of the matrix to the rows of the identity,
11147: ignoring the equations (as the RHS will also be zero). */
11149: PetscCall(MatFindZeroRows(*A, &zerorows));
11151: if (zerorows != NULL) { /* if there are any zero rows */
11152: PetscCall(MatCreateVecs(*A, &diag, NULL));
11153: PetscCall(MatGetDiagonal(*A, diag));
11154: PetscCall(VecISSet(diag, zerorows, 1.0));
11155: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11156: PetscCall(VecDestroy(&diag));
11157: PetscCall(ISDestroy(&zerorows));
11158: }
11159: PetscFunctionReturn(PETSC_SUCCESS);
11160: }
11162: /*@C
11163: MatSetOperation - Allows user to set a matrix operation for any matrix type
11165: Logically Collective
11167: Input Parameters:
11168: + mat - the matrix
11169: . op - the name of the operation
11170: - f - the function that provides the operation
11172: Level: developer
11174: Example Usage:
11175: .vb
11176: extern PetscErrorCode usermult(Mat, Vec, Vec);
11178: PetscCall(MatCreateXXX(comm, ..., &A));
11179: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11180: .ve
11182: Notes:
11183: See the file `include/petscmat.h` for a complete list of matrix
11184: operations, which all have the form MATOP_<OPERATION>, where
11185: <OPERATION> is the name (in all capital letters) of the
11186: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11188: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11189: sequence as the usual matrix interface routines, since they
11190: are intended to be accessed via the usual matrix interface
11191: routines, e.g.,
11192: .vb
11193: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11194: .ve
11196: In particular each function MUST return `PETSC_SUCCESS` on success and
11197: nonzero on failure.
11199: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11201: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11202: @*/
11203: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11204: {
11205: PetscFunctionBegin;
11207: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11208: (((void (**)(void))mat->ops)[op]) = f;
11209: PetscFunctionReturn(PETSC_SUCCESS);
11210: }
11212: /*@C
11213: MatGetOperation - Gets a matrix operation for any matrix type.
11215: Not Collective
11217: Input Parameters:
11218: + mat - the matrix
11219: - op - the name of the operation
11221: Output Parameter:
11222: . f - the function that provides the operation
11224: Level: developer
11226: Example Usage:
11227: .vb
11228: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11230: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11231: .ve
11233: Notes:
11234: See the file include/petscmat.h for a complete list of matrix
11235: operations, which all have the form MATOP_<OPERATION>, where
11236: <OPERATION> is the name (in all capital letters) of the
11237: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11239: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11241: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11242: @*/
11243: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11244: {
11245: PetscFunctionBegin;
11247: *f = (((void (**)(void))mat->ops)[op]);
11248: PetscFunctionReturn(PETSC_SUCCESS);
11249: }
11251: /*@
11252: MatHasOperation - Determines whether the given matrix supports the particular operation.
11254: Not Collective
11256: Input Parameters:
11257: + mat - the matrix
11258: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11260: Output Parameter:
11261: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11263: Level: advanced
11265: Note:
11266: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11268: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11269: @*/
11270: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11271: {
11272: PetscFunctionBegin;
11274: PetscAssertPointer(has, 3);
11275: if (mat->ops->hasoperation) {
11276: PetscUseTypeMethod(mat, hasoperation, op, has);
11277: } else {
11278: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11279: else {
11280: *has = PETSC_FALSE;
11281: if (op == MATOP_CREATE_SUBMATRIX) {
11282: PetscMPIInt size;
11284: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11285: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11286: }
11287: }
11288: }
11289: PetscFunctionReturn(PETSC_SUCCESS);
11290: }
11292: /*@
11293: MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent
11295: Collective
11297: Input Parameter:
11298: . mat - the matrix
11300: Output Parameter:
11301: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11303: Level: beginner
11305: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11306: @*/
11307: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11308: {
11309: PetscFunctionBegin;
11312: PetscAssertPointer(cong, 2);
11313: if (!mat->rmap || !mat->cmap) {
11314: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11315: PetscFunctionReturn(PETSC_SUCCESS);
11316: }
11317: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11318: PetscCall(PetscLayoutSetUp(mat->rmap));
11319: PetscCall(PetscLayoutSetUp(mat->cmap));
11320: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11321: if (*cong) mat->congruentlayouts = 1;
11322: else mat->congruentlayouts = 0;
11323: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11324: PetscFunctionReturn(PETSC_SUCCESS);
11325: }
11327: PetscErrorCode MatSetInf(Mat A)
11328: {
11329: PetscFunctionBegin;
11330: PetscUseTypeMethod(A, setinf);
11331: PetscFunctionReturn(PETSC_SUCCESS);
11332: }
11334: /*@C
11335: MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms
11336: and possibly removes small values from the graph structure.
11338: Collective
11340: Input Parameters:
11341: + A - the matrix
11342: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11343: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11344: . filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11345: . num_idx - size of 'index' array
11346: - index - array of block indices to use for graph strength of connection weight
11348: Output Parameter:
11349: . graph - the resulting graph
11351: Level: advanced
11353: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11354: @*/
11355: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11356: {
11357: PetscFunctionBegin;
11361: PetscAssertPointer(graph, 7);
11362: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11363: PetscFunctionReturn(PETSC_SUCCESS);
11364: }
11366: /*@
11367: MatEliminateZeros - eliminate the nondiagonal zero entries in place from the nonzero structure of a sparse `Mat` in place,
11368: meaning the same memory is used for the matrix, and no new memory is allocated.
11370: Collective
11372: Input Parameters:
11373: + A - the matrix
11374: - keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well
11376: Level: intermediate
11378: Developer Note:
11379: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
11380: of the arrays in the data structure are unneeded.
11382: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11383: @*/
11384: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11385: {
11386: PetscFunctionBegin;
11388: PetscUseTypeMethod(A, eliminatezeros, keep);
11389: PetscFunctionReturn(PETSC_SUCCESS);
11390: }